Dec 15 06:51:13 crc systemd[1]: Starting Kubernetes Kubelet... Dec 15 06:51:13 crc restorecon[4822]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc 
restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 15 06:51:13 crc 
restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc 
restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc 
restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 
crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 
06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:13 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 
06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 15 06:51:14 crc 
restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 
06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 
06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc 
restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 15 06:51:14 crc restorecon[4822]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 15 06:51:14 crc kubenswrapper[4876]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.538141 4876 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541007 4876 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541025 4876 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541030 4876 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541035 4876 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541039 4876 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541045 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541049 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541057 4876 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541062 4876 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541067 4876 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541072 4876 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541077 4876 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541081 4876 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541085 4876 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541089 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541093 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541111 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541117 4876 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541121 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541124 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541128 4876 
feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541132 4876 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541136 4876 feature_gate.go:330] unrecognized feature gate: Example Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541139 4876 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541143 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541146 4876 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541150 4876 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541154 4876 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541158 4876 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541162 4876 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541167 4876 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541172 4876 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541177 4876 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541181 4876 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541186 4876 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541191 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541195 4876 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541199 4876 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541203 4876 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541207 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541211 4876 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541215 4876 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541219 4876 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541223 4876 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541229 4876 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541233 4876 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541238 4876 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541243 4876 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541247 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541251 4876 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541255 4876 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541259 4876 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541263 4876 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541267 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541271 4876 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541275 4876 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541279 4876 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541283 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541287 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541291 4876 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 
06:51:14.541295 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541298 4876 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541302 4876 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541305 4876 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541309 4876 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541312 4876 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541316 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541319 4876 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541323 4876 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541327 4876 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.541332 4876 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541578 4876 flags.go:64] FLAG: --address="0.0.0.0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541589 4876 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541597 4876 flags.go:64] FLAG: --anonymous-auth="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541605 4876 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541611 4876 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541617 4876 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541622 4876 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541628 4876 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541633 4876 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541637 4876 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541642 4876 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541646 4876 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541650 4876 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541654 4876 flags.go:64] FLAG: --cgroup-root="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541658 4876 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541662 4876 flags.go:64] FLAG: --client-ca-file="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541667 4876 flags.go:64] FLAG: --cloud-config="" Dec 15 06:51:14 crc 
kubenswrapper[4876]: I1215 06:51:14.541671 4876 flags.go:64] FLAG: --cloud-provider="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541676 4876 flags.go:64] FLAG: --cluster-dns="[]" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541681 4876 flags.go:64] FLAG: --cluster-domain="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541685 4876 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541689 4876 flags.go:64] FLAG: --config-dir="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541694 4876 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541699 4876 flags.go:64] FLAG: --container-log-max-files="5" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541705 4876 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541710 4876 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541714 4876 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541718 4876 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541722 4876 flags.go:64] FLAG: --contention-profiling="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541726 4876 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541730 4876 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541734 4876 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541738 4876 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541744 4876 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541749 4876 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541753 4876 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541757 4876 flags.go:64] FLAG: --enable-load-reader="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541762 4876 flags.go:64] FLAG: --enable-server="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541766 4876 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541773 4876 flags.go:64] FLAG: --event-burst="100" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541777 4876 flags.go:64] FLAG: --event-qps="50" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541781 4876 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541786 4876 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541790 4876 flags.go:64] FLAG: --eviction-hard="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541796 4876 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541801 4876 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541805 4876 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 15 
06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541810 4876 flags.go:64] FLAG: --eviction-soft="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541814 4876 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541818 4876 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541823 4876 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541827 4876 flags.go:64] FLAG: --experimental-mounter-path="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541831 4876 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541835 4876 flags.go:64] FLAG: --fail-swap-on="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541840 4876 flags.go:64] FLAG: --feature-gates="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541845 4876 flags.go:64] FLAG: --file-check-frequency="20s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541849 4876 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541854 4876 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541859 4876 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541863 4876 flags.go:64] FLAG: --healthz-port="10248" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541868 4876 flags.go:64] FLAG: --help="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541872 4876 flags.go:64] FLAG: --hostname-override="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541876 4876 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541881 4876 flags.go:64] FLAG: --http-check-frequency="20s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541885 4876 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541889 4876 flags.go:64] FLAG: --image-credential-provider-config="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541894 4876 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541898 4876 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541902 4876 flags.go:64] FLAG: --image-service-endpoint="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541907 4876 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541912 4876 flags.go:64] FLAG: --kube-api-burst="100" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541917 4876 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541922 4876 flags.go:64] FLAG: --kube-api-qps="50" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541926 4876 flags.go:64] FLAG: --kube-reserved="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541930 4876 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541934 4876 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541939 4876 flags.go:64] FLAG: --kubelet-cgroups="" Dec 15 06:51:14 crc kubenswrapper[4876]: 
I1215 06:51:14.541943 4876 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541947 4876 flags.go:64] FLAG: --lock-file="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541951 4876 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541956 4876 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541960 4876 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541967 4876 flags.go:64] FLAG: --log-json-split-stream="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541971 4876 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541975 4876 flags.go:64] FLAG: --log-text-split-stream="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541980 4876 flags.go:64] FLAG: --logging-format="text" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541984 4876 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541989 4876 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541993 4876 flags.go:64] FLAG: --manifest-url="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.541998 4876 flags.go:64] FLAG: --manifest-url-header="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542004 4876 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542008 4876 flags.go:64] FLAG: --max-open-files="1000000" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542013 4876 flags.go:64] FLAG: --max-pods="110" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542018 4876 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542022 4876 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542027 4876 flags.go:64] FLAG: --memory-manager-policy="None" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542031 4876 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542036 4876 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542040 4876 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542044 4876 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542054 4876 flags.go:64] FLAG: --node-status-max-images="50" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542059 4876 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542063 4876 flags.go:64] FLAG: --oom-score-adj="-999" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542068 4876 flags.go:64] FLAG: --pod-cidr="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542072 4876 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542079 4876 flags.go:64] FLAG: 
--pod-manifest-path="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542083 4876 flags.go:64] FLAG: --pod-max-pids="-1" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542087 4876 flags.go:64] FLAG: --pods-per-core="0" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542092 4876 flags.go:64] FLAG: --port="10250" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542096 4876 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542100 4876 flags.go:64] FLAG: --provider-id="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542125 4876 flags.go:64] FLAG: --qos-reserved="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542129 4876 flags.go:64] FLAG: --read-only-port="10255" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542133 4876 flags.go:64] FLAG: --register-node="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542138 4876 flags.go:64] FLAG: --register-schedulable="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542142 4876 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542151 4876 flags.go:64] FLAG: --registry-burst="10" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542156 4876 flags.go:64] FLAG: --registry-qps="5" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542160 4876 flags.go:64] FLAG: --reserved-cpus="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542165 4876 flags.go:64] FLAG: --reserved-memory="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542170 4876 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542176 4876 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542181 4876 flags.go:64] FLAG: --rotate-certificates="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542185 4876 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542189 4876 flags.go:64] FLAG: --runonce="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542194 4876 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542198 4876 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542203 4876 flags.go:64] FLAG: --seccomp-default="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542207 4876 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542211 4876 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542215 4876 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542220 4876 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542224 4876 flags.go:64] FLAG: --storage-driver-password="root" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542232 4876 flags.go:64] FLAG: --storage-driver-secure="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542236 4876 flags.go:64] FLAG: --storage-driver-table="stats" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542240 4876 flags.go:64] FLAG: --storage-driver-user="root" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 
06:51:14.542244 4876 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542249 4876 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542253 4876 flags.go:64] FLAG: --system-cgroups="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542257 4876 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542264 4876 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542268 4876 flags.go:64] FLAG: --tls-cert-file="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542272 4876 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542277 4876 flags.go:64] FLAG: --tls-min-version="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542281 4876 flags.go:64] FLAG: --tls-private-key-file="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542285 4876 flags.go:64] FLAG: --topology-manager-policy="none" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542289 4876 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542294 4876 flags.go:64] FLAG: --topology-manager-scope="container" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542298 4876 flags.go:64] FLAG: --v="2" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542303 4876 flags.go:64] FLAG: --version="false" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542309 4876 flags.go:64] FLAG: --vmodule="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542316 4876 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542322 4876 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542439 4876 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542445 4876 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542449 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542453 4876 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542456 4876 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542460 4876 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542464 4876 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542469 4876 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542473 4876 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542477 4876 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542481 4876 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542484 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542489 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542493 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542496 4876 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542500 4876 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542504 4876 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542507 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542511 4876 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542514 4876 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542518 4876 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542522 4876 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542525 4876 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542529 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542532 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542536 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542540 4876 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542543 4876 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542547 4876 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542551 4876 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542556 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542559 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542563 4876 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 15 06:51:14 crc kubenswrapper[4876]: 
W1215 06:51:14.542568 4876 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542573 4876 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542580 4876 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542584 4876 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542588 4876 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542592 4876 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542596 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542600 4876 feature_gate.go:330] unrecognized feature gate: Example Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542604 4876 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542607 4876 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542611 4876 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542616 4876 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542619 4876 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542623 4876 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542626 4876 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542630 4876 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542633 4876 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542637 4876 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542640 4876 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542644 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542647 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542651 4876 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542654 4876 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542658 4876 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542661 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542665 4876 
feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542668 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542672 4876 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542675 4876 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542682 4876 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542686 4876 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542690 4876 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542694 4876 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542698 4876 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542702 4876 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542705 4876 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542709 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.542713 4876 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.542864 4876 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.551017 4876 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.551073 4876 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551177 4876 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551185 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551190 4876 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551195 4876 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551199 4876 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551203 4876 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551207 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551211 
4876 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551214 4876 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551218 4876 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551222 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551225 4876 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551229 4876 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551232 4876 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551236 4876 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551239 4876 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551243 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551247 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551250 4876 feature_gate.go:330] unrecognized feature gate: Example Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551254 4876 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551259 4876 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551263 4876 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551267 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551273 4876 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551283 4876 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551287 4876 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551292 4876 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551297 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551304 4876 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551309 4876 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551313 4876 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551318 4876 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551323 4876 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551327 4876 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551333 4876 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551339 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551345 4876 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551349 4876 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551353 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551356 4876 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551360 4876 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551363 4876 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551367 4876 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551371 4876 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551374 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551378 4876 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551381 4876 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551385 4876 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551388 4876 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 15 06:51:14 crc 
kubenswrapper[4876]: W1215 06:51:14.551392 4876 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551395 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551399 4876 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551402 4876 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551406 4876 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551411 4876 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551416 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551419 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551423 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551427 4876 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551430 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551434 4876 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551437 4876 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551441 4876 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551445 4876 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551449 4876 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551452 4876 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551456 4876 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551460 4876 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551465 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551470 4876 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551474 4876 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.551481 4876 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551605 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551614 4876 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551618 4876 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551623 4876 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551626 4876 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551630 4876 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551634 4876 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551638 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551642 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551646 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551650 4876 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551656 4876 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551660 4876 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551664 4876 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551668 4876 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551671 4876 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551675 4876 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551678 4876 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551682 4876 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551686 4876 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551690 4876 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551693 4876 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551696 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551700 4876 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551704 4876 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551708 4876 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551711 4876 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551715 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551719 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551722 4876 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551726 4876 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551729 4876 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551733 4876 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551736 4876 feature_gate.go:330] unrecognized feature gate: Example Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551740 4876 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551744 4876 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551748 4876 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 15 06:51:14 crc 
kubenswrapper[4876]: W1215 06:51:14.551752 4876 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551755 4876 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551760 4876 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551765 4876 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551770 4876 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551775 4876 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551779 4876 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551783 4876 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551787 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551792 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551795 4876 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551800 4876 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551804 4876 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551807 4876 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551811 4876 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551815 4876 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551819 4876 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551824 4876 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551828 4876 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551832 4876 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551836 4876 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551840 4876 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551844 4876 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551848 4876 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551852 4876 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551855 4876 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551859 4876 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551863 4876 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551867 4876 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551872 4876 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551876 4876 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551883 4876 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551887 4876 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.551891 4876 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.551898 4876 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.552391 4876 server.go:940] "Client rotation is on, will bootstrap in background" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.558054 4876 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.558172 4876 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.558654 4876 server.go:997] "Starting client certificate rotation" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.558680 4876 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.559181 4876 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-29 13:10:18.112042506 +0000 UTC Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.559304 4876 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 342h19m3.552744421s for next certificate rotation Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.563044 4876 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.564936 4876 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.577306 4876 log.go:25] "Validated CRI v1 runtime API" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.593450 4876 log.go:25] "Validated CRI v1 image API" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.595605 4876 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.598600 4876 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-15-06-42-09-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.598637 4876 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.621578 4876 manager.go:217] Machine: {Timestamp:2025-12-15 06:51:14.618069816 +0000 UTC m=+0.189212827 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a6c10a00-26cd-4256-831a-0419287771d9 BootID:9e406852-849d-435c-ab8a-94b3d3d795a3 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 
Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:ea:39:90 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:ea:39:90 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:fc:1f:13 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:d1:98:9f Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e7:f1:f7 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:27:d4:4c Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:92:49:db Speed:-1 Mtu:1496} {Name:ens7.44 MacAddress:52:54:00:8f:db:c7 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2a:ef:99:d6:72:bb Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:06:bd:2f:17:2e:ea Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction 
Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.622175 4876 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.622487 4876 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.625160 4876 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.625866 4876 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.625932 4876 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.626251 4876 topology_manager.go:138] "Creating topology manager with none policy" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.626267 4876 container_manager_linux.go:303] "Creating device plugin manager" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.626528 4876 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.626556 4876 
server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.626884 4876 state_mem.go:36] "Initialized new in-memory state store" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627001 4876 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627793 4876 kubelet.go:418] "Attempting to sync node with API server" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627822 4876 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627860 4876 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627877 4876 kubelet.go:324] "Adding apiserver pod source" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.627892 4876 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.629788 4876 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.630470 4876 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.630909 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.630962 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.631052 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.631069 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.631591 4876 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632351 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632386 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632399 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 
06:51:14.632411 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632428 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632438 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632449 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632466 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632479 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632492 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632509 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632521 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.632947 4876 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.633777 4876 server.go:1280] "Started kubelet" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.633768 4876 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.634177 4876 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.634166 4876 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.634748 4876 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 15 06:51:14 crc systemd[1]: Started Kubernetes Kubelet. 
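With "Started kubelet" logged and the server listening on 0.0.0.0:10250, the node's HTTPS endpoints (/healthz, /pods, /metrics) and the pod-resources gRPC socket at /var/lib/kubelet/pod-resources/kubelet.sock are up. A minimal probe of the health endpoint, standard library only; KUBELET_TOKEN is a placeholder for a bearer token authorized for the nodes/proxy subresource, and certificate verification is skipped purely to keep the sketch short:

    package main

    import (
        "crypto/tls"
        "fmt"
        "io"
        "net/http"
        "os"
    )

    func main() {
        // The kubelet serving cert is signed by the cluster CA; verification is
        // skipped here only for brevity. Point TLSClientConfig at the CA bundle
        // in real use.
        client := &http.Client{
            Transport: &http.Transport{
                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
            },
        }

        req, err := http.NewRequest("GET", "https://127.0.0.1:10250/healthz", nil)
        if err != nil {
            panic(err)
        }
        // Kubelet authn/authz normally requires a bearer token or client cert.
        req.Header.Set("Authorization", "Bearer "+os.Getenv("KUBELET_TOKEN"))

        resp, err := client.Do(req)
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(resp.Body)
        fmt.Println(resp.Status, string(body))
    }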
Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.636439 4876 server.go:460] "Adding debug handlers to kubelet server" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.636982 4876 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.637035 4876 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.636558 4876 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.70:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188150e54a81f2e5 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-15 06:51:14.633708261 +0000 UTC m=+0.204851182,LastTimestamp:2025-12-15 06:51:14.633708261 +0000 UTC m=+0.204851182,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.637230 4876 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 17:17:11.740694871 +0000 UTC Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.637511 4876 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 706h25m57.103195743s for next certificate rotation Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.637325 4876 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.637272 4876 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638317 4876 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638406 4876 factory.go:55] Registering systemd factory Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638436 4876 factory.go:221] Registration of the systemd container factory successfully Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.637341 4876 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.638474 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="200ms" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638792 4876 factory.go:153] Registering CRI-O factory Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638818 4876 factory.go:221] Registration of the crio container factory successfully Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638901 4876 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.638937 4876 factory.go:103] Registering Raw factory Dec 15 06:51:14 
crc kubenswrapper[4876]: I1215 06:51:14.638960 4876 manager.go:1196] Started watching for new ooms in manager Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.639442 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.639572 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.640211 4876 manager.go:319] Starting recovery of all containers Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.661047 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.661855 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662142 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662287 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662416 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662614 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662787 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.662915 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.663040 4876 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.663204 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.663352 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.663477 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.663612 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.664227 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.664388 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.664609 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.664794 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665010 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665221 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665360 4876 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665512 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665745 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.665896 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.666021 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.666216 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.666748 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.667028 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.667233 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.667415 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.667656 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.667920 4876 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668158 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668301 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668423 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668569 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668706 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668836 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668983 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.669174 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.669308 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.669429 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.669551 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670026 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670372 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670536 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670657 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670776 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.670904 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671021 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671184 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671310 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671426 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671548 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671691 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671812 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.671931 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.672090 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.672249 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.672604 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.672766 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.672894 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673013 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673169 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673320 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673508 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673682 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673820 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.673950 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.674071 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.674256 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.674408 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.674576 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.668441 4876 manager.go:324] Recovery completed Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.674753 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.675042 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.675210 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.675340 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.675475 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.676217 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.676272 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.676301 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.676323 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.676352 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.677903 4876 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.677982 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678019 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678051 
4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678082 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678189 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678227 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678259 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678290 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678319 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678354 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678386 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678418 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678453 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678483 4876 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678513 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678545 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678577 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678609 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678640 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678674 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678710 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678739 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678794 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678832 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678868 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678909 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678941 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.678976 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679007 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679039 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679071 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679140 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679178 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679211 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679243 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679275 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679318 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679348 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679392 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679427 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679456 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679487 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679516 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679545 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679576 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679610 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679638 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679675 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679707 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679736 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679765 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679800 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679828 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679860 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679890 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679920 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679950 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.679981 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680051 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680083 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680147 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680181 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680209 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680240 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680270 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680301 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680331 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680365 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680395 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680427 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680456 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680486 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680515 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680547 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680576 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680606 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680634 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680665 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680695 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680724 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680754 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680784 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680817 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680844 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680866 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680906 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680927 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680950 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680976 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.680999 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681021 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681045 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681069 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681091 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681173 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681205 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681229 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681261 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681295 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681324 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681350 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681378 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681409 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681439 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681470 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681500 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681530 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681559 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681588 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681685 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681717 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681747 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681781 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681810 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681847 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681880 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681914 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681943 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.681972 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682003 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682033 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682065 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682094 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682198 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682229 4876 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682256 4876 reconstruct.go:97] "Volume reconstruction finished" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.682275 4876 reconciler.go:26] "Reconciler: start to sync state" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.687657 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.690139 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.690196 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.690210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.691246 4876 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.691262 4876 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.691288 4876 state_mem.go:36] "Initialized new in-memory state store" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.701665 4876 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.703073 4876 policy_none.go:49] "None policy: Start" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.704112 4876 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.704175 4876 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.704212 4876 kubelet.go:2335] "Starting kubelet main sync loop" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.704271 4876 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.704899 4876 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.704928 4876 state_mem.go:35] "Initializing new in-memory state store" Dec 15 06:51:14 crc kubenswrapper[4876]: W1215 06:51:14.705312 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.705385 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.738550 4876 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.759901 4876 manager.go:334] "Starting Device Plugin manager" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.759961 4876 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.759977 4876 server.go:79] "Starting device plugin registration server" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.760483 4876 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.760508 4876 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.760957 4876 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.761156 4876 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.761176 4876 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.769818 4876 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.804895 4876 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.805057 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806168 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806217 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806233 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806455 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806663 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.806741 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807204 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807410 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807655 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807707 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.807987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808039 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808748 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808838 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808882 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.808898 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.809042 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.809172 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.809223 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.809329 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.809363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810131 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810153 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810166 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810181 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810170 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810317 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810504 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810620 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.810659 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.811298 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.811329 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.811359 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.811546 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.811573 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812220 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812476 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812503 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.812512 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.842037 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="400ms" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.861493 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.862995 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.863052 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.863071 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.863136 4876 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 15 06:51:14 crc kubenswrapper[4876]: E1215 06:51:14.863871 4876 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.70:6443: connect: connection refused" node="crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886128 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886191 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886227 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886284 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886369 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886413 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886454 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886539 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886581 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886603 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886687 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886766 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.886801 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.987772 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.987977 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.987998 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.987912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988152 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988237 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988269 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988279 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988382 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988416 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988437 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988328 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988453 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988533 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988531 4876 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988595 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988755 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988547 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988682 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988854 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988876 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988666 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988919 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.989020 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.989048 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988950 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.988962 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.989093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:14 crc kubenswrapper[4876]: I1215 06:51:14.989245 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.064924 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.066567 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.066624 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.066634 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.066663 4876 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.067162 4876 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.70:6443: connect: connection refused" node="crc" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.134583 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.143529 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.172083 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.176074 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-5f670c1359255704a14dd9164e20f464913857f9a2b68b11d804ff00e3227859 WatchSource:0}: Error finding container 5f670c1359255704a14dd9164e20f464913857f9a2b68b11d804ff00e3227859: Status 404 returned error can't find the container with id 5f670c1359255704a14dd9164e20f464913857f9a2b68b11d804ff00e3227859 Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.182551 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-dfbdfa1dc058aad5098f056f5d214558224506350d0601dcb735dce8520d66f9 WatchSource:0}: Error finding container dfbdfa1dc058aad5098f056f5d214558224506350d0601dcb735dce8520d66f9: Status 404 returned error can't find the container with id dfbdfa1dc058aad5098f056f5d214558224506350d0601dcb735dce8520d66f9 Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.191752 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-79db65be62f5dbf09bb91567a10b2a8655805fe0d7ed64b7059b5bebdd34fd89 WatchSource:0}: Error finding container 79db65be62f5dbf09bb91567a10b2a8655805fe0d7ed64b7059b5bebdd34fd89: Status 404 returned error can't find the container with id 79db65be62f5dbf09bb91567a10b2a8655805fe0d7ed64b7059b5bebdd34fd89 Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.200867 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.207090 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.225450 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-d8948aa7721d5d03c2076c16217b2c253cc424e1f6edbac86d9789112027e4e7 WatchSource:0}: Error finding container d8948aa7721d5d03c2076c16217b2c253cc424e1f6edbac86d9789112027e4e7: Status 404 returned error can't find the container with id d8948aa7721d5d03c2076c16217b2c253cc424e1f6edbac86d9789112027e4e7 Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.226825 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-80f048f54c33eb5739ee3755bf542e65112cb34d5fcb1835f760a926ab7cc99a WatchSource:0}: Error finding container 80f048f54c33eb5739ee3755bf542e65112cb34d5fcb1835f760a926ab7cc99a: Status 404 returned error can't find the container with id 80f048f54c33eb5739ee3755bf542e65112cb34d5fcb1835f760a926ab7cc99a Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.243600 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="800ms" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.467725 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.469509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.469545 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.469560 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.469593 4876 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.470086 4876 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.70:6443: connect: connection refused" node="crc" Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.588498 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.588597 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.634791 4876 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 
38.102.83.70:6443: connect: connection refused Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.709985 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2" exitCode=0 Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.710076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.710253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5f670c1359255704a14dd9164e20f464913857f9a2b68b11d804ff00e3227859"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.710386 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.711583 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.711616 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.711628 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.712185 4876 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0" exitCode=0 Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.712272 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.712305 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dfbdfa1dc058aad5098f056f5d214558224506350d0601dcb735dce8520d66f9"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.712425 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.713359 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.713425 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.713446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.714331 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715442 4876 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6" exitCode=0 Dec 15 
06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715556 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715582 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715500 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715745 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"80f048f54c33eb5739ee3755bf542e65112cb34d5fcb1835f760a926ab7cc99a"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.715885 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.717013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.717051 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.717064 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.720584 4876 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd" exitCode=0 Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.720672 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.720720 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d8948aa7721d5d03c2076c16217b2c253cc424e1f6edbac86d9789112027e4e7"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.720817 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.725598 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.725629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.725640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.729390 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f"} Dec 15 06:51:15 crc kubenswrapper[4876]: I1215 06:51:15.729476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"79db65be62f5dbf09bb91567a10b2a8655805fe0d7ed64b7059b5bebdd34fd89"} Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.737638 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.737768 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.803415 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.803546 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:15 crc kubenswrapper[4876]: W1215 06:51:15.914694 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.70:6443: connect: connection refused Dec 15 06:51:15 crc kubenswrapper[4876]: E1215 06:51:15.914789 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.70:6443: connect: connection refused" logger="UnhandledError" Dec 15 06:51:16 crc kubenswrapper[4876]: E1215 06:51:16.044466 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="1.6s" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.271162 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.273548 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.273585 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.273595 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.273623 4876 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.734593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.734637 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.734647 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.734738 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.736223 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.736249 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.736259 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.741045 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.741088 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.741118 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.741174 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.742540 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.742560 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 
06:51:16.742570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.745897 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.745976 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.745994 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.746009 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.747486 4876 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1" exitCode=0 Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.747548 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.747694 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.748970 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.749013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.749030 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.752328 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d"} Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.752445 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.753722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.753752 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.753763 4876 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:16 crc kubenswrapper[4876]: I1215 06:51:16.973195 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.626808 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.767198 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c"} Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.767331 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.768935 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.768984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.769008 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.770286 4876 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d" exitCode=0 Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.770413 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d"} Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.770432 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.770616 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.771637 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.771687 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.771706 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.772256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.772315 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:17 crc kubenswrapper[4876]: I1215 06:51:17.772340 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.776888 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2"} Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.776942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef"} Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.776955 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81"} Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.776964 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9"} Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.776984 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.777044 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.777068 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.778204 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.778234 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.778243 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.779137 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.779205 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:18 crc kubenswrapper[4876]: I1215 06:51:18.779221 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.786583 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881"} Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.786698 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.787579 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.787614 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.787627 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:19 crc 
kubenswrapper[4876]: I1215 06:51:19.917615 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.917880 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.917952 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.920326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.920403 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:19 crc kubenswrapper[4876]: I1215 06:51:19.920426 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.474519 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.474856 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.476647 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.476692 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.476711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.579673 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.788953 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.789045 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.789905 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.789958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.789973 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.790656 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.790690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:20 crc kubenswrapper[4876]: I1215 06:51:20.790702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:21 crc kubenswrapper[4876]: I1215 06:51:21.696204 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:21 crc kubenswrapper[4876]: I1215 06:51:21.791958 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:21 crc kubenswrapper[4876]: I1215 06:51:21.793703 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:21 crc kubenswrapper[4876]: I1215 06:51:21.793804 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:21 crc kubenswrapper[4876]: I1215 06:51:21.793832 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:22 crc kubenswrapper[4876]: I1215 06:51:22.591735 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 15 06:51:22 crc kubenswrapper[4876]: I1215 06:51:22.592050 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:22 crc kubenswrapper[4876]: I1215 06:51:22.593752 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:22 crc kubenswrapper[4876]: I1215 06:51:22.593797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:22 crc kubenswrapper[4876]: I1215 06:51:22.593813 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:23 crc kubenswrapper[4876]: I1215 06:51:23.174952 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 15 06:51:23 crc kubenswrapper[4876]: I1215 06:51:23.175319 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:23 crc kubenswrapper[4876]: I1215 06:51:23.180261 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:23 crc kubenswrapper[4876]: I1215 06:51:23.180325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:23 crc kubenswrapper[4876]: I1215 06:51:23.180345 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.255775 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.256073 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.257822 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.257873 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.257884 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.260816 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:24 crc 
kubenswrapper[4876]: I1215 06:51:24.655290 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:24 crc kubenswrapper[4876]: E1215 06:51:24.769919 4876 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.801474 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.802837 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.802895 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:24 crc kubenswrapper[4876]: I1215 06:51:24.802909 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:25 crc kubenswrapper[4876]: I1215 06:51:25.805074 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:25 crc kubenswrapper[4876]: I1215 06:51:25.807332 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:25 crc kubenswrapper[4876]: I1215 06:51:25.807392 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:25 crc kubenswrapper[4876]: I1215 06:51:25.807414 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:25 crc kubenswrapper[4876]: I1215 06:51:25.813036 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:26 crc kubenswrapper[4876]: E1215 06:51:26.274468 4876 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.635023 4876 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.673483 4876 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.673565 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.808228 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.809120 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.809149 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:26 crc kubenswrapper[4876]: I1215 06:51:26.809159 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:27 crc kubenswrapper[4876]: W1215 06:51:27.302502 4876 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.302592 4876 trace.go:236] Trace[277864243]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (15-Dec-2025 06:51:17.300) (total time: 10002ms): Dec 15 06:51:27 crc kubenswrapper[4876]: Trace[277864243]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (06:51:27.302) Dec 15 06:51:27 crc kubenswrapper[4876]: Trace[277864243]: [10.002137165s] [10.002137165s] END Dec 15 06:51:27 crc kubenswrapper[4876]: E1215 06:51:27.302613 4876 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 15 06:51:27 crc kubenswrapper[4876]: E1215 06:51:27.646426 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.655741 4876 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.655857 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.690026 4876 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.690123 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP 
probe failed with statuscode: 403" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.696907 4876 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.696985 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.875316 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.876561 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.876601 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.876614 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:27 crc kubenswrapper[4876]: I1215 06:51:27.876637 4876 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.705066 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.705411 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.707364 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.707435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.707459 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.713411 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.822317 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.822397 4876 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.823801 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.823860 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:31 crc kubenswrapper[4876]: I1215 06:51:31.823886 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 
06:51:32.563676 4876 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.637313 4876 apiserver.go:52] "Watching apiserver" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.642490 4876 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.642884 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.643622 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.643619 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.643726 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.643811 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.644632 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.644788 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.645069 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.649743 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.652838 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.655177 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.655302 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.657590 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.657762 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.658002 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.658063 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.658084 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.658072 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.658239 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.696232 4876 trace.go:236] Trace[1551875154]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (15-Dec-2025 06:51:18.742) (total time: 13954ms): Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[1551875154]: ---"Objects listed" error: 13953ms (06:51:32.696) Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[1551875154]: [13.954057151s] [13.954057151s] END Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.696286 4876 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.697317 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.697781 4876 trace.go:236] Trace[1167214990]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (15-Dec-2025 06:51:18.927) (total time: 13770ms): Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[1167214990]: ---"Objects listed" error: 13770ms (06:51:32.697) Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[1167214990]: [13.770528884s] [13.770528884s] END Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.698067 4876 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.699539 4876 trace.go:236] Trace[674741910]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (15-Dec-2025 06:51:18.740) (total time: 13958ms): Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[674741910]: ---"Objects listed" error: 13958ms (06:51:32.699) Dec 15 06:51:32 crc kubenswrapper[4876]: Trace[674741910]: [13.958833011s] [13.958833011s] END Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.699579 4876 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.702646 4876 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.721376 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.738628 4876 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.741874 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.749404 4876 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:58224->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.749498 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:58224->192.168.126.11:17697: read: connection reset by peer" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.750005 4876 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.750096 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.758568 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.781366 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.802077 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803601 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803675 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803706 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803801 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803829 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803851 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803872 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803889 
4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803915 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803945 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803969 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.803988 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804004 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804025 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804043 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804049 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804064 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804158 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804185 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804212 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804229 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804247 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804266 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804284 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804300 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804318 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804338 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804413 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804422 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.804485 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:51:33.30446701 +0000 UTC m=+18.875609921 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804502 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804521 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804541 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804558 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804577 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804594 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804610 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804627 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804643 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804674 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804691 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804708 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804724 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804740 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804755 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804740 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804771 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804862 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804893 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804914 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804918 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804950 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804968 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.804985 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805004 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805022 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805037 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805071 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805087 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805118 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805149 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805158 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805166 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805214 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805241 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805268 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805295 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805319 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805342 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805362 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805381 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805399 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805414 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805432 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805447 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805463 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805478 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805492 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805492 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805516 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805536 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805557 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805563 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805581 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805606 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805625 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805645 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805668 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod 
\"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805678 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805685 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805730 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805752 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805777 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805800 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805818 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805840 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805863 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805885 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805906 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805923 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805953 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805970 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805988 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806008 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806025 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806045 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806063 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806079 4876 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806099 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806411 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806438 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806467 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806517 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806545 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806586 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806605 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806625 4876 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806644 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806666 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806693 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806716 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806734 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806748 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806766 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806791 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806816 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 15 
06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806840 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806856 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806873 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806888 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806905 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806934 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806960 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806981 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807026 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807046 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: 
\"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807064 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807084 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807123 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807143 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807160 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807175 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807195 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807223 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807244 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807266 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: 
\"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807284 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807302 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807328 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807353 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807371 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807397 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807418 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807445 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807467 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807485 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" 
(UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807533 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807555 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807574 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807598 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807623 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807648 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807674 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807700 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807727 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807747 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807773 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807797 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807829 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807855 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807884 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807909 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807932 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807958 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807982 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808006 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808032 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808069 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808090 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808123 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808140 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808156 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808173 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808190 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808211 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: 
\"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808229 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808271 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808290 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808312 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808332 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808349 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808370 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808390 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808410 4876 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808428 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808451 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808469 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808490 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808513 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808534 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808592 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808617 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808639 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod 
\"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808663 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808690 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808719 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808747 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808801 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808819 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808841 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808864 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc 
kubenswrapper[4876]: I1215 06:51:32.808887 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808908 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808928 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808989 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809003 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809014 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809028 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809039 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809050 4876 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809060 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809073 4876 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.813869 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.823169 4876 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805727 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.824451 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.824759 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825166 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825344 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805862 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.805995 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806001 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806097 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806144 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806152 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806291 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806515 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806580 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806645 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806762 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806769 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806791 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806897 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.806956 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807043 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807217 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.807817 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808205 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808299 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808420 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808454 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808463 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808646 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808668 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808833 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808931 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.808936 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809003 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809073 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809291 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.809293 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.810838 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.810929 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.810923 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.810977 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811085 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811245 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811428 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811561 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811571 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811567 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811608 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811619 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811881 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.811954 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.812198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.812206 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.813226 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.813652 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.813688 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.813739 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.815694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.816525 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.817236 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.817616 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.817855 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.817861 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.818144 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.818174 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.819655 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.819784 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.820058 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.820126 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.820352 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.820392 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.820515 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.818357 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.818407 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.821419 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.821691 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.822043 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.822156 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.818874 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.819048 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.822458 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.822509 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.822583 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.822692 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.819134 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.819078 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.823567 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.823626 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.824345 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825508 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825703 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825746 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825878 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.825413 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.826304 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.826739 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.826862 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). 
InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827050 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827273 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827471 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827538 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827698 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.827818 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828058 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828060 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828234 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828358 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828543 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828676 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828683 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.828854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.829150 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.829213 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.829269 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.829533 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.829890 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.830138 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.830657 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.830928 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.831337 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.831498 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.831613 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.831743 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.833308 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.833490 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.833504 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.833632 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.834176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.835065 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.835269 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.835391 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.835943 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.837422 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:33.337330801 +0000 UTC m=+18.908473952 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.837512 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:33.337503436 +0000 UTC m=+18.908646347 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.838058 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.838939 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.839256 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.841577 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.841804 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.842049 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842163 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842190 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842208 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842275 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:33.342253435 +0000 UTC m=+18.913396336 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842316 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842414 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842427 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.842425 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.842463 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:33.34245272 +0000 UTC m=+18.913595831 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.842759 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.842939 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.843307 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.843361 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.843892 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.844717 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.845554 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.845591 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.846383 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.846394 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.847218 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.847395 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.847539 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.847631 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.848698 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.848735 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.848856 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.848927 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.849782 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.849809 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.850973 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.851232 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.851348 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.851355 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.851692 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.852142 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.852265 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.852651 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.853192 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.853600 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.853733 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.853797 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.854018 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.854541 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.854914 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.855167 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.855757 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.857320 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.857386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.858282 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.860513 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.860785 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.862381 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.863087 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.863271 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.864708 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.865376 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.865845 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.866591 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.869402 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c" exitCode=255 Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.869456 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.869643 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.870471 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.870657 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.870895 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.884446 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.886738 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.887973 4876 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.888047 4876 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.890885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.890939 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.890950 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.890967 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.890978 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:32Z","lastTransitionTime":"2025-12-15T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.898401 4876 scope.go:117] "RemoveContainer" containerID="c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.900635 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.901375 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.901548 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.904266 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.908402 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910262 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc 
kubenswrapper[4876]: I1215 06:51:32.910325 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910403 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910420 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910429 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910463 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910476 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910500 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910519 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910534 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910551 4876 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910567 4876 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910580 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") 
on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910594 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910608 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910621 4876 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910634 4876 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910648 4876 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910661 4876 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910677 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910693 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910708 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910722 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910737 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910752 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910766 4876 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910800 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910813 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910826 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910839 4876 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910856 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910869 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910882 4876 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910895 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910910 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910926 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910942 4876 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910959 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910973 4876 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.910987 4876 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911001 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911017 4876 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911030 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911043 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911058 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911071 4876 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911085 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911116 4876 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911131 4876 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911148 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911162 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911177 4876 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911191 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911206 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911219 4876 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911234 4876 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911247 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911262 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911275 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911291 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911304 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911317 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911331 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911346 4876 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911360 4876 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911377 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911395 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911411 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911427 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911441 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911454 4876 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911495 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911527 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911545 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911559 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911574 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911606 4876 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911621 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911635 4876 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911649 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911663 4876 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911677 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911690 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911705 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911719 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911732 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911745 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911758 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911770 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911781 4876 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911791 4876 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911800 4876 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911811 4876 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911834 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911847 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911857 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911869 4876 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911879 4876 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911890 4876 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911901 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911911 4876 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911922 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911932 4876 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911943 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911952 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911964 4876 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911975 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911985 4876 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.911995 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912007 4876 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912018 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912027 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912041 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912051 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912062 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912073 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912139 4876 reconciler_common.go:293] 
"Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912151 4876 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912163 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912174 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912185 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912196 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912209 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912219 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912229 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912238 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912246 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912278 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912297 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912311 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:32Z","lastTransitionTime":"2025-12-15T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912404 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912249 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912560 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912571 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912582 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912594 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912603 4876 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912614 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912623 4876 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912633 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912643 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: 
\"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912652 4876 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912662 4876 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912672 4876 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912682 4876 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912692 4876 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912702 4876 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912712 4876 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912723 4876 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912735 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912745 4876 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912756 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912765 4876 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912775 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" 
(UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912784 4876 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912795 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912805 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912815 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912826 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912836 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912845 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912855 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912865 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912874 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912883 4876 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912891 4876 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912900 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath 
\"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912910 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912919 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912928 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912938 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912947 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912956 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912965 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.912976 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913001 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913010 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913020 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913029 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913039 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913048 4876 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913058 4876 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913067 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913077 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913087 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913116 4876 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913129 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913142 4876 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913153 4876 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913165 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913175 4876 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913185 4876 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913196 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 
06:51:32.913206 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.913217 4876 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.923592 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.923911 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.929229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.929328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.929344 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.929360 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.929371 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:32Z","lastTransitionTime":"2025-12-15T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.937447 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.952885 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a
6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.966065 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.966144 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.966156 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.966178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.966192 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:32Z","lastTransitionTime":"2025-12-15T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.968044 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.977285 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 15 06:51:32 crc kubenswrapper[4876]: E1215 06:51:32.983864 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988544 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988573 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988581 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988596 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988606 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:32Z","lastTransitionTime":"2025-12-15T06:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.988910 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 15 06:51:32 crc kubenswrapper[4876]: I1215 06:51:32.996202 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 15 06:51:32 crc kubenswrapper[4876]: W1215 06:51:32.996650 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-470ea57ba01bf161d1c942e74b98e2f4776dec76c9f66c1169e044d447191423 WatchSource:0}: Error finding container 470ea57ba01bf161d1c942e74b98e2f4776dec76c9f66c1169e044d447191423: Status 404 returned error can't find the container with id 470ea57ba01bf161d1c942e74b98e2f4776dec76c9f66c1169e044d447191423 Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.000505 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.000617 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.006203 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.006230 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.006239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.006257 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.006268 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.108356 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.108386 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.108394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.108406 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.108416 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.199746 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.221671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.221717 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.221728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.221746 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.221757 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.225861 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.226628 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.227818 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.243373 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.259458 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.273069 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.285564 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.298507 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.310388 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.316429 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.316642 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:51:34.316614439 +0000 UTC m=+19.887757350 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.322602 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.323902 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.323940 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.323950 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.323966 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.323976 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.337197 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.349094 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.361792 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.390853 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3d
ded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.405087 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.416862 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.417214 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.417273 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.417299 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.417325 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417433 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417432 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not 
registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417527 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:34.417494184 +0000 UTC m=+19.988637095 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417597 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:34.417572536 +0000 UTC m=+19.988715617 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417600 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417644 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417648 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417702 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417720 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417796 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:34.417773342 +0000 UTC m=+19.988916253 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417663 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:33 crc kubenswrapper[4876]: E1215 06:51:33.417859 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:34.417847823 +0000 UTC m=+19.988990734 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.427687 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.428059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.428258 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.428358 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.428441 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.435236 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.531268 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.531306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.531317 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.531333 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.531343 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.633269 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.633305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.633313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.633324 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.633334 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.736446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.736503 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.736517 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.736534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.736545 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.839955 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.840031 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.840054 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.840085 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.840199 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.875839 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.887543 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.888914 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.890927 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"717402947f49ec98068299064d3e632725de8e94f36337ceee11d3426a34fce7"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.894222 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.894237 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.894247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8fa2184b644852c213db12e986422649f42d82fa8ba7d812075c8d8f1d92e331"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.902184 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.902244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"470ea57ba01bf161d1c942e74b98e2f4776dec76c9f66c1169e044d447191423"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.912745 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.930668 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.943142 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.943183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.943195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.943211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.943222 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:33Z","lastTransitionTime":"2025-12-15T06:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.945533 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.959315 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.976772 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:33 crc kubenswrapper[4876]: I1215 06:51:33.998034 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:33Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.020818 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.034616 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.044986 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.045043 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.045055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.045071 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.045083 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.045817 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.066247 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.081951 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.093915 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.106056 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.121468 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.137577 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147920 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147946 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.147978 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.250175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.250228 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.250239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.250253 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.250263 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.326980 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.327212 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:51:36.327181573 +0000 UTC m=+21.898324494 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.352726 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.352764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.352775 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.352790 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.352802 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.427834 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.427873 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.427893 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.427910 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.427993 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428036 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:36.428023527 +0000 UTC m=+21.999166438 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428090 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428123 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428133 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428152 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:36.428145861 +0000 UTC m=+21.999288772 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428187 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428194 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428200 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428217 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:36.428212333 +0000 UTC m=+21.999355244 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428244 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.428260 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:36.428255184 +0000 UTC m=+21.999398095 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.455545 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.455587 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.455601 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.455620 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.455635 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.557987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.558054 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.558070 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.558096 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.558160 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660483 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660734 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660774 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.660791 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.664049 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.671202 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.676997 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.701516 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.704682 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.704792 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.704825 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.704875 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.704899 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:34 crc kubenswrapper[4876]: E1215 06:51:34.705010 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.711978 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.712909 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.714416 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.715218 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.716456 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.717081 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.717265 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.717878 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.719310 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" 
path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.720313 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.721699 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.722437 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.723803 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.724517 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.725204 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.726470 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.727182 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.728514 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.729328 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.730675 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.730943 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.732088 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.733704 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.735961 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.736572 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.737892 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.738475 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.739893 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" 
Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.740673 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.741236 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.742429 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.742906 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.743793 4876 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.743913 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.745870 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.747030 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.747534 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.749343 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.750358 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.750902 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.752083 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.752771 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 
15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.753281 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.754348 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.755340 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.755985 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.756933 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.757609 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.758672 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.759132 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.759657 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.760543 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.761050 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.761852 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.762966 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763334 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763354 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763365 4876 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.763693 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.764990 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.784287 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.799179 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.810685 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.824836 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.843502 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"vol
umeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\
\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.857711 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.865542 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.865583 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.865612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.865631 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.865860 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.870777 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.882148 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.898371 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.913892 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.926130 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.938265 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.951027 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.964833 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.968490 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.968535 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.968549 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.968571 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.968589 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:34Z","lastTransitionTime":"2025-12-15T06:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:34 crc kubenswrapper[4876]: I1215 06:51:34.977644 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.002204 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-m
etrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.018475 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"conta
inerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.035912 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.049630 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.060550 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.071946 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.072011 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.072030 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.072051 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.072075 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.077128 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.175079 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.175132 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.175141 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.175154 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.175163 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.278025 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.278078 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.278090 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.278128 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.278140 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.380578 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.380616 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.380627 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.380643 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.380654 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.482701 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.482758 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.482770 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.482788 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.482799 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.586281 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.586323 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.586333 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.586348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.586359 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.689262 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.689343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.689387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.689419 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.689440 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.793592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.793895 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.793912 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.793932 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.793947 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.896164 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.896207 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.896217 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.896231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.896242 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.908066 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595"} Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.921607 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.936424 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.951176 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.963053 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.999886 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.999938 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:35 crc kubenswrapper[4876]: I1215 06:51:35.999954 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:35.999975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:35.999991 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:35Z","lastTransitionTime":"2025-12-15T06:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.008279 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.040489 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.058909 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.072656 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.086687 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.102531 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.102556 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.102568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.102586 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.102600 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.205973 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.206024 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.206040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.206059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.206073 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.311209 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.311261 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.311271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.311290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.311307 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.346482 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.346704 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:51:40.346681689 +0000 UTC m=+25.917824610 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.414484 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.414533 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.414551 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.414569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.414582 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.448002 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.448190 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.448252 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448307 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448374 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448404 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:40.448382136 +0000 UTC m=+26.019525057 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448406 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448428 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448480 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:40.448463118 +0000 UTC m=+26.019606039 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448636 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448684 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448697 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448795 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:40.448773367 +0000 UTC m=+26.019916288 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448711 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.448844 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:40.448836249 +0000 UTC m=+26.019979170 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.448347 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.517514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.517607 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.517625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.517677 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.517698 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.621260 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.621366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.621387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.621412 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.621428 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.705473 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.705616 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.705758 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.705793 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.705975 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:36 crc kubenswrapper[4876]: E1215 06:51:36.706098 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.723672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.723757 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.723771 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.723786 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.723802 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.826854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.826913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.826923 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.826940 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.826951 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.930225 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.930293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.930308 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.930330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:36 crc kubenswrapper[4876]: I1215 06:51:36.930345 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:36Z","lastTransitionTime":"2025-12-15T06:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.034249 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.034324 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.034337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.034363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.034377 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.138532 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.138597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.138615 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.138640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.138657 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.241703 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.241754 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.241768 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.241787 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.241801 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.344831 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.344872 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.344885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.344903 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.344915 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.448724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.448799 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.448825 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.448854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.448877 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.552235 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.552299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.552319 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.552342 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.552365 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.655482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.655540 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.655560 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.655583 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.655602 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.759532 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.759628 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.759655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.759692 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.759719 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.863296 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.863453 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.863484 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.863561 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.863585 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.967599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.967685 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.967705 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.967773 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:37 crc kubenswrapper[4876]: I1215 06:51:37.967792 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:37Z","lastTransitionTime":"2025-12-15T06:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.070898 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.070941 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.070953 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.070970 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.070983 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.174267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.174312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.174326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.174348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.174364 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.263725 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-nkxj7"] Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.264146 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.270284 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.274035 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.274315 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.277816 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.277845 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.277856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.277873 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.277884 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.290516 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.306392 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.333567 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.349791 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 
06:51:38.367555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fkrr\" (UniqueName: \"kubernetes.io/projected/4405bff2-7918-48ba-97b3-81079e042256-kube-api-access-4fkrr\") pod \"node-resolver-nkxj7\" (UID: \"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.367656 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4405bff2-7918-48ba-97b3-81079e042256-hosts-file\") pod \"node-resolver-nkxj7\" (UID: \"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.371646 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vlwzk"] Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.371901 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.372210 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.376817 4876 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-image-registry"/"image-registry-certificates" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.377148 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382208 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382882 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382908 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382918 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382932 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.382943 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.384512 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.411019 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.429724 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.443369 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.468651 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/410e3d06-6cd9-4b3a-812f-f07217c30488-host\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.468729 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fkrr\" (UniqueName: \"kubernetes.io/projected/4405bff2-7918-48ba-97b3-81079e042256-kube-api-access-4fkrr\") pod \"node-resolver-nkxj7\" (UID: \"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.468790 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4405bff2-7918-48ba-97b3-81079e042256-hosts-file\") pod \"node-resolver-nkxj7\" (UID: \"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.468826 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jjs6\" (UniqueName: \"kubernetes.io/projected/410e3d06-6cd9-4b3a-812f-f07217c30488-kube-api-access-6jjs6\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.468878 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/410e3d06-6cd9-4b3a-812f-f07217c30488-serviceca\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.469421 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4405bff2-7918-48ba-97b3-81079e042256-hosts-file\") pod \"node-resolver-nkxj7\" (UID: 
\"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.485323 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.485368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.485382 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.485402 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.485414 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.488639 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fkrr\" (UniqueName: \"kubernetes.io/projected/4405bff2-7918-48ba-97b3-81079e042256-kube-api-access-4fkrr\") pod \"node-resolver-nkxj7\" (UID: \"4405bff2-7918-48ba-97b3-81079e042256\") " pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.490208 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.502185 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.515560 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.527181 
4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.548708 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.563858 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.570076 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/410e3d06-6cd9-4b3a-812f-f07217c30488-serviceca\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.570483 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/410e3d06-6cd9-4b3a-812f-f07217c30488-host\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.570595 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host\" (UniqueName: \"kubernetes.io/host-path/410e3d06-6cd9-4b3a-812f-f07217c30488-host\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.570634 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jjs6\" (UniqueName: \"kubernetes.io/projected/410e3d06-6cd9-4b3a-812f-f07217c30488-kube-api-access-6jjs6\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.571088 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/410e3d06-6cd9-4b3a-812f-f07217c30488-serviceca\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.576969 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-nkxj7" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.580696 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.588072 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jjs6\" (UniqueName: \"kubernetes.io/projected/410e3d06-6cd9-4b3a-812f-f07217c30488-kube-api-access-6jjs6\") pod \"node-ca-vlwzk\" (UID: \"410e3d06-6cd9-4b3a-812f-f07217c30488\") " pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.589415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.589462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.589474 4876 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.589496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.589510 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: W1215 06:51:38.589747 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4405bff2_7918_48ba_97b3_81079e042256.slice/crio-90561e9e3ae18598ea6f034dd60bef2e46b13216c9cb034859c31ce6196b0085 WatchSource:0}: Error finding container 90561e9e3ae18598ea6f034dd60bef2e46b13216c9cb034859c31ce6196b0085: Status 404 returned error can't find the container with id 90561e9e3ae18598ea6f034dd60bef2e46b13216c9cb034859c31ce6196b0085 Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.625222 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.662545 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.683504 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vlwzk" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.700421 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.700471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.700481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.700499 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.700512 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.701446 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.706290 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.706368 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.706439 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:38 crc kubenswrapper[4876]: E1215 06:51:38.706430 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:38 crc kubenswrapper[4876]: E1215 06:51:38.706581 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:38 crc kubenswrapper[4876]: E1215 06:51:38.706663 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:38 crc kubenswrapper[4876]: W1215 06:51:38.706764 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod410e3d06_6cd9_4b3a_812f_f07217c30488.slice/crio-d0020016e4d1b2cf76ce466276007573cd52bdabb30e6f1b1162578ba40213da WatchSource:0}: Error finding container d0020016e4d1b2cf76ce466276007573cd52bdabb30e6f1b1162578ba40213da: Status 404 returned error can't find the container with id d0020016e4d1b2cf76ce466276007573cd52bdabb30e6f1b1162578ba40213da Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.758429 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.784388 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.803133 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.803181 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.803196 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.803218 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.803230 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.805045 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.906509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.906556 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.906568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.906589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.906604 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:38Z","lastTransitionTime":"2025-12-15T06:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.918830 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vlwzk" event={"ID":"410e3d06-6cd9-4b3a-812f-f07217c30488","Type":"ContainerStarted","Data":"d0020016e4d1b2cf76ce466276007573cd52bdabb30e6f1b1162578ba40213da"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.920073 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nkxj7" event={"ID":"4405bff2-7918-48ba-97b3-81079e042256","Type":"ContainerStarted","Data":"2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.920143 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nkxj7" event={"ID":"4405bff2-7918-48ba-97b3-81079e042256","Type":"ContainerStarted","Data":"90561e9e3ae18598ea6f034dd60bef2e46b13216c9cb034859c31ce6196b0085"} Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.947083 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"
started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.968190 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\
\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:38 crc kubenswrapper[4876]: I1215 06:51:38.993821 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.009644 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.009694 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.009705 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.009722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.009733 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.015699 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.028152 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.042048 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.054081 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.070381 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.086930 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.098862 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.112862 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.112921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.112934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.112962 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.112976 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.116948 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.215974 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.216031 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.216040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.216061 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.216072 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.257767 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-ddcwq"] Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.258255 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-47dbh"] Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.258448 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.260338 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.261404 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.261719 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.261815 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-zdprc"] Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.261891 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.262233 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266089 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266147 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266244 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266362 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266371 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266392 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wm92c"] Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266493 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266572 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266821 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.266838 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.267901 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.269756 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.270310 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.270347 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.270548 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.274663 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.274917 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.277383 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.304685 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68
77441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\
\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.318678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.318723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.318734 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.318754 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.318767 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.322307 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.335682 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.350070 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.365579 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379351 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379392 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379502 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379529 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-daemon-config\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-hostroot\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379572 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379593 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9e29c3a-f186-4bb8-af46-82cea3a16508-proxy-tls\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379638 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cni-binary-copy\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379687 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379715 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-tuning-conf-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgztf\" (UniqueName: \"kubernetes.io/projected/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-kube-api-access-tgztf\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379794 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" 
Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379817 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379838 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt2gt\" (UniqueName: \"kubernetes.io/projected/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-kube-api-access-kt2gt\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379860 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cnibin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379915 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2qgc\" (UniqueName: \"kubernetes.io/projected/f9e29c3a-f186-4bb8-af46-82cea3a16508-kube-api-access-r2qgc\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379938 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-multus-certs\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379961 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.379988 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380017 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-multus\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380073 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-conf-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380124 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-etc-kubernetes\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380150 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380180 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-socket-dir-parent\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380207 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-kubelet\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380238 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-k8s-cni-cncf-io\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380262 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380284 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f9e29c3a-f186-4bb8-af46-82cea3a16508-rootfs\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380319 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-system-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380348 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-os-release\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380367 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-binary-copy\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380383 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9e29c3a-f186-4bb8-af46-82cea3a16508-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380397 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-netns\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380460 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380476 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380509 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380526 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380556 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-system-cni-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " 
pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380571 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-os-release\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380586 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380604 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzj5w\" (UniqueName: \"kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380618 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380632 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-bin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380659 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380683 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380702 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380721 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket\") pod \"ovnkube-node-wm92c\" (UID: 
\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.380761 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cnibin\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.392815 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.405568 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.421791 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.424696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.424756 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.424780 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.424800 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.424811 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.436173 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.447995 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.461788 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.474151 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482071 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482207 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482321 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482411 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cnibin\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482359 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482562 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cnibin\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482498 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482620 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482644 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-daemon-config\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482668 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9e29c3a-f186-4bb8-af46-82cea3a16508-proxy-tls\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482707 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-hostroot\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482746 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482766 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-tuning-conf-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482784 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cni-binary-copy\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482801 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482819 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482808 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482835 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt2gt\" (UniqueName: \"kubernetes.io/projected/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-kube-api-access-kt2gt\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482932 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cnibin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482966 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgztf\" (UniqueName: \"kubernetes.io/projected/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-kube-api-access-tgztf\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.482996 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2qgc\" (UniqueName: 
\"kubernetes.io/projected/f9e29c3a-f186-4bb8-af46-82cea3a16508-kube-api-access-r2qgc\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483016 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-multus-certs\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483047 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483059 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-hostroot\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483068 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483092 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483095 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-multus\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483137 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-multus\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483156 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-conf-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483179 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns\") pod \"ovnkube-node-wm92c\" (UID: 
\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483197 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-socket-dir-parent\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483215 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cnibin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483217 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-kubelet\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483239 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-kubelet\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483264 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-etc-kubernetes\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483291 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-k8s-cni-cncf-io\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483338 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-os-release\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483348 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-multus-certs\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-binary-copy\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 
06:51:39.483414 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483444 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f9e29c3a-f186-4bb8-af46-82cea3a16508-rootfs\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483471 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-system-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483513 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483530 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483551 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483570 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9e29c3a-f186-4bb8-af46-82cea3a16508-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483586 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-netns\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483610 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-tuning-conf-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-system-cni-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483663 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-os-release\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483716 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzj5w\" (UniqueName: \"kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483738 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483762 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-bin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483797 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483844 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484158 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-daemon-config\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484256 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-etc-kubernetes\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484273 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-binary-copy\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484313 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484325 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-k8s-cni-cncf-io\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484331 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-cni-binary-copy\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484391 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-os-release\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484400 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484435 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484553 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484612 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-system-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484613 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484643 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484671 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f9e29c3a-f186-4bb8-af46-82cea3a16508-rootfs\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484718 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484750 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484787 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-run-netns\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484825 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484857 
4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-conf-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484881 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484923 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-socket-dir-parent\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.483638 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-system-cni-dir\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.484984 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-os-release\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485015 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-host-var-lib-cni-bin\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485182 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485244 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f9e29c3a-f186-4bb8-af46-82cea3a16508-mcd-auth-proxy-config\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485276 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485289 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485526 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-multus-cni-dir\") pod \"multus-ddcwq\" (UID: \"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.485678 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.487257 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9e29c3a-f186-4bb8-af46-82cea3a16508-proxy-tls\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.489357 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.490360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.505999 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzj5w\" (UniqueName: \"kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w\") pod \"ovnkube-node-wm92c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.506513 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgztf\" (UniqueName: \"kubernetes.io/projected/d2c0440d-a8eb-4f51-8626-c3bb9d1b0867-kube-api-access-tgztf\") pod \"multus-ddcwq\" (UID: 
\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\") " pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.509080 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt2gt\" (UniqueName: \"kubernetes.io/projected/9f253db4-4bae-4bc6-ae0d-a15c6a19ab86-kube-api-access-kt2gt\") pod \"multus-additional-cni-plugins-47dbh\" (UID: \"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\") " pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.511353 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.512174 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2qgc\" (UniqueName: \"kubernetes.io/projected/f9e29c3a-f186-4bb8-af46-82cea3a16508-kube-api-access-r2qgc\") pod \"machine-config-daemon-zdprc\" (UID: \"f9e29c3a-f186-4bb8-af46-82cea3a16508\") " 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.527589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.527968 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.528058 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.528203 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.528268 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.539795 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift
-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e
33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.556295 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.569264 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.576418 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-ddcwq" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.584045 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.584842 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-47dbh" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.595930 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.602084 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:39 crc kubenswrapper[4876]: W1215 06:51:39.603028 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f253db4_4bae_4bc6_ae0d_a15c6a19ab86.slice/crio-24f4463556d57ec22091fac064fb9f87c8cea6f702399ab4d3a2f45959e01eab WatchSource:0}: Error finding container 24f4463556d57ec22091fac064fb9f87c8cea6f702399ab4d3a2f45959e01eab: Status 404 returned error can't find the container with id 24f4463556d57ec22091fac064fb9f87c8cea6f702399ab4d3a2f45959e01eab Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.603183 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.619503 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: W1215 06:51:39.623222 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bf97f83_6aa5_4a4d_9d1b_e84244c6d44c.slice/crio-b4811f813be370967183741aaa81c940db056de7fd4fff263e4658a182cf728d WatchSource:0}: Error finding container b4811f813be370967183741aaa81c940db056de7fd4fff263e4658a182cf728d: Status 404 returned error can't find 
the container with id b4811f813be370967183741aaa81c940db056de7fd4fff263e4658a182cf728d Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.632170 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.632232 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.632243 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.632262 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.632292 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.635093 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"we
bhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.650614 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.661612 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.678278 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.692483 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.709969 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.735500 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.735559 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.735575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.735640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.735663 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.850211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.850782 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.850794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.850816 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.850831 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.926825 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerStarted","Data":"866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.926893 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerStarted","Data":"24f4463556d57ec22091fac064fb9f87c8cea6f702399ab4d3a2f45959e01eab"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.928446 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerStarted","Data":"522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.928482 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerStarted","Data":"ce5aa348eab736871008333de4d361ef5cda77a7fcf9b207e51c2b9a9ef2b61d"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.930355 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vlwzk" event={"ID":"410e3d06-6cd9-4b3a-812f-f07217c30488","Type":"ContainerStarted","Data":"8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.932275 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de" exitCode=0 Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.932375 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.932470 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"b4811f813be370967183741aaa81c940db056de7fd4fff263e4658a182cf728d"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.934646 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.934679 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.934694 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"18d6cde9a0d6e350ab7406c557538c885810941ec30c6f51c0bc10867a897714"} Dec 15 06:51:39 crc 
kubenswrapper[4876]: I1215 06:51:39.949562 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"
2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.954346 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.954421 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.954444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.954471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.954492 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:39Z","lastTransitionTime":"2025-12-15T06:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.964291 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.983719 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:39 crc kubenswrapper[4876]: I1215 06:51:39.998394 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:39Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.010606 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.032123 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled
\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-ove
rrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zz
j5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.052458 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673147
31ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.057069 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.057134 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.057148 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.057168 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.057184 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.070123 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.085004 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.108746 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.128687 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.144661 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161624 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161905 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161925 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161935 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161950 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.161962 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.179349 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.191875 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.205766 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.227721 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.241914 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.261422 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.264665 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.264723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.264736 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.264759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.264784 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.274742 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.287121 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.306448 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.320412 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.334712 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.346246 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.360282 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.368496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.368563 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.368575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.368599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.368616 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.376678 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.393276 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.393521 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:51:48.393488561 +0000 UTC m=+33.964631522 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.408455 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.444188 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.472351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.472387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.472397 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.472411 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.472420 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.485875 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.494165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.494206 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.494231 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.494254 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494313 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494332 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494334 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494372 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494375 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494382 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494418 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:48.494405448 +0000 UTC m=+34.065548359 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494432 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:48.494427338 +0000 UTC m=+34.065570239 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494453 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494480 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494584 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:48.494552421 +0000 UTC m=+34.065695322 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.494611 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:48.494602433 +0000 UTC m=+34.065745334 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.575274 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.575316 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.575326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.575341 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.575356 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.678509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.679231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.679243 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.679261 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.679272 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.704716 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.704761 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.704834 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.706067 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.706214 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:40 crc kubenswrapper[4876]: E1215 06:51:40.706298 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.783191 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.783245 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.783260 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.783286 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.783301 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.886057 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.886134 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.886149 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.886173 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.886191 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.942522 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.943999 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.944090 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.944221 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.944363 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.944506 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1" exitCode=0 Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.944615 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1"} Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.959233 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.971961 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.983865 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.993258 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.993310 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.993326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.993349 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:40 crc kubenswrapper[4876]: I1215 06:51:40.993366 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:40Z","lastTransitionTime":"2025-12-15T06:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.001480 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:40Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.024395 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.044190 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.059398 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.075476 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"
tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.088668 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.098448 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.098487 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.098496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.098513 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.098524 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.102230 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.116524 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.130948 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.143257 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.154541 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.169968 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.201597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.201658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.201671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.201695 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.201714 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.305590 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.305640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.305650 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.305670 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.305682 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.408722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.408757 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.408766 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.408778 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.408788 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.512300 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.512377 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.512392 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.512415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.512428 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.615410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.615468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.615482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.615502 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.615516 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.719073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.719136 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.719149 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.719175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.719190 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.822434 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.822965 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.822976 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.822993 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.823005 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.925808 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.925859 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.925872 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.925889 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.925905 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:41Z","lastTransitionTime":"2025-12-15T06:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.949661 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063" exitCode=0 Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.949724 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.954742 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d"} Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.964990 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.979844 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:41 crc kubenswrapper[4876]: I1215 06:51:41.997832 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:41Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.010303 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.024990 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.029617 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.029647 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.029655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.029669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.029679 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.035490 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.047762 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.062370 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.077293 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.092660 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.111307 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.132039 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.132093 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.132124 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.132146 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.132159 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.147224 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.166908 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",
\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o
://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.184517 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.198680 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.234898 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.234949 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.234958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.234973 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.234982 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.337841 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.337897 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.337912 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.337929 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.337941 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.440284 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.440320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.440330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.440346 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.440357 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.542874 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.542921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.542933 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.542951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.542963 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.645522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.645555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.645562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.645576 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.645585 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.705455 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.705544 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.705598 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:42 crc kubenswrapper[4876]: E1215 06:51:42.705645 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:42 crc kubenswrapper[4876]: E1215 06:51:42.705724 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:42 crc kubenswrapper[4876]: E1215 06:51:42.705820 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.748829 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.748874 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.748891 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.748913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.748928 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.851979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.852041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.852059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.852085 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.852134 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.954922 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.954973 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.954987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.955006 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.955018 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:42Z","lastTransitionTime":"2025-12-15T06:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.960180 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2" exitCode=0 Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.960260 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2"} Dec 15 06:51:42 crc kubenswrapper[4876]: I1215 06:51:42.981179 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:42Z 
is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.003648 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.038549 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.063828 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.063900 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.063931 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.063956 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.063973 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.068082 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.099758 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.113269 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.127058 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.149079 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.164789 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.166838 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.166931 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.166950 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.166986 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.167000 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.176017 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.189695 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.203585 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.215354 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.215389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.215400 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.215415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.215425 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.217087 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.230514 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237145 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z 
is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237360 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.237390 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.250439 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.253804 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.259064 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.259163 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.259236 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.259259 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.259273 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.273442 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.278648 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.278684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.278696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.278719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.278731 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.292159 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.295913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.295968 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.295984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.296007 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.296021 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.309437 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:43 crc kubenswrapper[4876]: E1215 06:51:43.309636 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.311856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.311943 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.311963 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.311988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.312023 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.415638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.415683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.415695 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.415713 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.415726 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.518998 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.519063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.519073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.519092 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.519143 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.623478 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.623550 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.623568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.623589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.623604 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.726846 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.726907 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.726921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.726940 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.726954 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.830245 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.830286 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.830295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.830312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.830324 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.933500 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.933596 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.933623 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.934094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.934399 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:43Z","lastTransitionTime":"2025-12-15T06:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.969372 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.972884 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9" exitCode=0 Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.972943 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9"} Dec 15 06:51:43 crc kubenswrapper[4876]: I1215 06:51:43.992275 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:43Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.022477 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.035182 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.038125 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.038175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.038210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.038236 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.038248 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.052592 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.067947 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.085468 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.105612 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.117772 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.132866 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.143645 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.143707 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.143727 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.143753 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.143771 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.163799 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a9495
6af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.181407 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.195859 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.208918 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.218951 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.235265 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.245936 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.245985 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.245996 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.246015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.246027 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.349672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.349723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.349734 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.349754 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.349767 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.453589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.453650 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.453663 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.453684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.453697 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.555632 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.555702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.555722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.555744 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.555845 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.659856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.659899 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.659911 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.659929 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.659941 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.705031 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.705147 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:44 crc kubenswrapper[4876]: E1215 06:51:44.705239 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:44 crc kubenswrapper[4876]: E1215 06:51:44.705303 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.705781 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:44 crc kubenswrapper[4876]: E1215 06:51:44.705933 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.732209 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f8389
00b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.748293 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.761068 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.762964 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.762988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.762999 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.763013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.763024 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.784288 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.796984 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.815562 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.831291 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.848853 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.859076 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.866069 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.866215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.866229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.866255 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.866270 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.869941 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.882442 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.896067 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.907758 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.921361 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.936429 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.968655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.968690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.968705 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.968723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.968734 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:44Z","lastTransitionTime":"2025-12-15T06:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.978098 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d" exitCode=0 Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.978159 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d"} Dec 15 06:51:44 crc kubenswrapper[4876]: I1215 06:51:44.990475 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.000491 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.023212 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.047417 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.066801 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.071671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.071751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.071771 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.071798 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.071814 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.079796 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.094140 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.105770 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.123346 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.138791 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.152749 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.163743 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.175343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.175396 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.175410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.175437 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.175453 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.177528 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.192836 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.208340 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.278612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.279153 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.279167 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.279189 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.279205 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.381879 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.381934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.381947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.381967 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.381979 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.485593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.485658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.485672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.485699 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.485718 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.588602 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.588658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.588669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.588691 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.588705 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.698876 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.698948 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.698968 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.698993 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.699012 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.802295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.802351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.802361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.802374 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.802382 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.904645 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.904684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.904695 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.904714 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.904727 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:45Z","lastTransitionTime":"2025-12-15T06:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.986750 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f253db4-4bae-4bc6-ae0d-a15c6a19ab86" containerID="a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0" exitCode=0 Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.986860 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerDied","Data":"a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.993907 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9"} Dec 15 06:51:45 crc kubenswrapper[4876]: I1215 06:51:45.994365 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008086 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008192 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008216 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008241 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008260 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.008562 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.029684 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.046437 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.064483 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.077868 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.079511 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.094847 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.110817 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.110864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.110883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.110934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.110951 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.113767 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.132876 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.148637 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.165064 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.176965 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.198002 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.213071 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.213098 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.213118 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.213131 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.213140 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.219241 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd
/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.233860 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.247322 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.261722 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.278231 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.289476 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.303014 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.315701 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.315756 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.315768 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.315793 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.315805 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.317617 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.328790 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.338824 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.348016 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.360503 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.381953 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.397935 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.418869 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.418928 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.418941 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.418962 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.418975 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.419785 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.433858 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.449087 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.472349 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\
\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:46Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.522729 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.522863 
4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.522892 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.522924 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.522947 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.625739 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.626026 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.626137 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.626240 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.626318 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.705443 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.705507 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:46 crc kubenswrapper[4876]: E1215 06:51:46.705557 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:46 crc kubenswrapper[4876]: E1215 06:51:46.705625 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.705502 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:46 crc kubenswrapper[4876]: E1215 06:51:46.705918 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.729188 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.729239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.729256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.729275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.729290 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.831026 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.832294 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.832334 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.832347 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.832361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.832372 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.934266 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.934309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.934321 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.934339 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:46 crc kubenswrapper[4876]: I1215 06:51:46.934351 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:46Z","lastTransitionTime":"2025-12-15T06:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:46.999957 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" event={"ID":"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86","Type":"ContainerStarted","Data":"97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.000322 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.019870 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.023296 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.032009 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.036300 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.036341 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.036350 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.036363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.036380 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.048250 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.065522 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.081236 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.096330 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.106596 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.120197 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.134471 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.138063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.138117 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.138130 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.138148 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.138159 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.146328 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.159213 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.173572 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.190664 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\
\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.208807 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.224274 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.234432 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.240344 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.240370 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.240378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.240390 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.240399 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.251743 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/r
un/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.271991 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.286978 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.298570 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.308899 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.318211 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.329589 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.341849 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.342996 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.343041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.343056 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.343077 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.343093 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.358946 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.376892 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.388901 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.399666 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.411853 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 
06:51:47.427925 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:47Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.445820 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.445860 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.445870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.445888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.445898 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.548403 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.548433 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.548443 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.548456 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.548465 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.650921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.650966 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.650977 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.650994 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.651007 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.753910 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.753947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.753958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.753975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.753987 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.857611 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.857645 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.857668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.857683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.857693 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.960215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.960286 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.960300 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.960315 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:47 crc kubenswrapper[4876]: I1215 06:51:47.960327 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:47Z","lastTransitionTime":"2025-12-15T06:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.006272 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/0.log" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.009192 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9" exitCode=1 Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.009243 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.010036 4876 scope.go:117] "RemoveContainer" containerID="e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.042568 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.062727 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.063984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.064010 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.064017 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.064032 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.064042 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.081207 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.096773 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.112874 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.137285 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"message\\\":\\\"m/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:47.592626 6178 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:47.593265 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1215 06:51:47.593318 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1215 06:51:47.593366 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:47.593381 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:47.593378 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1215 06:51:47.593403 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1215 06:51:47.593413 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:47.593440 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1215 06:51:47.595297 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1215 06:51:47.595321 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1215 06:51:47.595354 6178 factory.go:656] Stopping watch factory\\\\nI1215 06:51:47.595372 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1215 06:51:47.595410 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1215 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.158072 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.165887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.165938 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.165951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.165972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.165985 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.183885 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.196882 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 
06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.213140 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.234753 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.252325 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269333 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269432 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269455 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269483 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269507 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.269810 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.281407 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.296648 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:48Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.372335 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.372367 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.372375 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.372389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.372397 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474421 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474453 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474478 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474489 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.474974 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.475094 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:52:04.47506728 +0000 UTC m=+50.046210191 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.575749 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.575815 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.575844 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.575873 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575880 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575907 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575919 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575947 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575972 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:04.575953826 +0000 UTC m=+50.147096737 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575992 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:04.575984457 +0000 UTC m=+50.147127368 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.575990 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.576038 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.576059 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.576074 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.576060 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:04.576042848 +0000 UTC m=+50.147185759 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.576137 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:04.57612622 +0000 UTC m=+50.147269231 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.577307 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.577349 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.577361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.577376 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.577388 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.679315 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.679571 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.679639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.679720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.679785 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.704996 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.705131 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.705211 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.705482 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.705555 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 15 06:51:48 crc kubenswrapper[4876]: E1215 06:51:48.705343 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.783564 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.783623 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.783634 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.783650 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.783664 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.885720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.885757 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.885767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.885780 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.885790 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.987794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.987832 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.987839 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.987853 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 15 06:51:48 crc kubenswrapper[4876]: I1215 06:51:48.987863 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:48Z","lastTransitionTime":"2025-12-15T06:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.015998 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/0.log" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.020006 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.020778 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.042485 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.057248 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.077844 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.090455 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.090496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.090505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.090518 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.090527 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.092356 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.105964 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.127913 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"message\\\":\\\"m/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:47.592626 6178 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:47.593265 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1215 06:51:47.593318 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1215 06:51:47.593366 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:47.593381 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:47.593378 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1215 06:51:47.593403 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1215 06:51:47.593413 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:47.593440 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1215 06:51:47.595297 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1215 06:51:47.595321 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1215 06:51:47.595354 6178 factory.go:656] Stopping watch factory\\\\nI1215 06:51:47.595372 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1215 06:51:47.595410 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1215 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.140787 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.162322 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.177576 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193238 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193252 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193260 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.193868 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.207485 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.220417 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.232445 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.246266 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.262338 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:49Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.296318 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.296385 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.296404 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.296430 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.296448 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.398501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.398543 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.398555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.398571 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.398584 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.501522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.501557 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.501566 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.501580 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.501589 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.603855 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.603893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.603912 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.603930 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.603942 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.706502 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.706883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.706896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.706910 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.706919 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.809020 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.809094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.809153 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.809177 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.809189 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.912373 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.912711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.912796 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.912867 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:49 crc kubenswrapper[4876]: I1215 06:51:49.912931 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:49Z","lastTransitionTime":"2025-12-15T06:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.015396 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.015456 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.015472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.015492 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.015504 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.027090 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/1.log" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.027794 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/0.log" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.031346 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5" exitCode=1 Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.031382 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.031428 4876 scope.go:117] "RemoveContainer" containerID="e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.032632 4876 scope.go:117] "RemoveContainer" containerID="43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5" Dec 15 06:51:50 crc kubenswrapper[4876]: E1215 06:51:50.032902 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.055411 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.088969 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.108316 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.118271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.118336 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.118354 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.118383 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.118406 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.125735 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.146590 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.164262 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.190494 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"message\\\":\\\"m/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:47.592626 6178 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:47.593265 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1215 06:51:47.593318 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1215 06:51:47.593366 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:47.593381 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:47.593378 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1215 06:51:47.593403 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1215 06:51:47.593413 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:47.593440 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1215 06:51:47.595297 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1215 06:51:47.595321 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1215 06:51:47.595354 6178 factory.go:656] Stopping watch factory\\\\nI1215 06:51:47.595372 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1215 06:51:47.595410 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1215 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.211618 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.221805 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.221842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.221856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.221873 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.221887 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.237747 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.252700 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 
06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.270069 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.286870 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.309036 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.324080 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.324172 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.324193 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.324217 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.324234 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.325051 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.338917 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.427842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.428000 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.428027 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.428054 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.428075 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.530283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.530312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.530320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.530332 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.530341 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.583218 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.596654 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.608183 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.621083 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.633231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.633272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.633283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.633299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.633351 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.634884 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.647906 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.660035 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.672522 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.684906 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.696643 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.704627 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.704662 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:50 crc kubenswrapper[4876]: E1215 06:51:50.704762 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.704774 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:50 crc kubenswrapper[4876]: E1215 06:51:50.704869 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:50 crc kubenswrapper[4876]: E1215 06:51:50.704949 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.721172 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e7faf9aa2d504e54a96ef89e53f9ee3212e70eb87bf172891a495392d807f6b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"message\\\":\\\"m/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:47.592626 6178 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:47.593265 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1215 06:51:47.593318 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1215 06:51:47.593366 6178 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:47.593381 6178 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:47.593378 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1215 06:51:47.593403 6178 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1215 06:51:47.593413 6178 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:47.593440 6178 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1215 06:51:47.595297 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1215 06:51:47.595321 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1215 06:51:47.595354 6178 factory.go:656] Stopping watch factory\\\\nI1215 06:51:47.595372 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1215 06:51:47.595410 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1215 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.735870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.736005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.736087 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.736195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.736254 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.740525 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.755156 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.770845 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.783979 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.797304 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:50Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.839271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.839310 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.839322 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.839342 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.839354 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.942215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.942681 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.942916 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.943145 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:50 crc kubenswrapper[4876]: I1215 06:51:50.943339 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:50Z","lastTransitionTime":"2025-12-15T06:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.035962 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/1.log" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.039391 4876 scope.go:117] "RemoveContainer" containerID="43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5" Dec 15 06:51:51 crc kubenswrapper[4876]: E1215 06:51:51.039549 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.045192 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.045229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.045241 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.045254 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.045263 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.051481 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.064880 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.077662 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.089276 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.098755 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.109447 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.123023 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.137840 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 
06:51:51.148284 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.148328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.148341 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.148362 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.148376 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.152310 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-n
etns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.166175 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.186932 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b8
5de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.209644 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2ef
aeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.227912 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.251169 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.251500 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.251610 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.251714 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.251831 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.252436 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.277152 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:51Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.354754 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.354825 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.354841 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.354864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.354880 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.458028 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.458090 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.458123 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.458149 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.458165 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.561728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.561808 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.561825 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.561848 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.561868 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.664541 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.664606 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.664627 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.664651 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.664669 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.767438 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.767511 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.767538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.767568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.767590 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.870886 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.870948 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.870965 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.870988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.871007 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.974809 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.974896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.974936 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.974977 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.975002 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:51Z","lastTransitionTime":"2025-12-15T06:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.995832 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz"] Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.996389 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.998921 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 15 06:51:51 crc kubenswrapper[4876]: I1215 06:51:51.999206 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.009082 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.009128 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.009181 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.009219 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2tlt\" (UniqueName: \"kubernetes.io/projected/40cfea4f-327e-4198-a6ea-b4382d20ba28-kube-api-access-n2tlt\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.015660 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.033354 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.046453 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.060577 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.073165 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.077206 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.077293 4876 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.077361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.077444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.077499 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.085720 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.096399 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.107009 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.109485 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.109615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.109697 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.109782 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2tlt\" (UniqueName: \"kubernetes.io/projected/40cfea4f-327e-4198-a6ea-b4382d20ba28-kube-api-access-n2tlt\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.110617 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.110791 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40cfea4f-327e-4198-a6ea-b4382d20ba28-env-overrides\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.115001 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40cfea4f-327e-4198-a6ea-b4382d20ba28-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.123512 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.130977 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2tlt\" (UniqueName: \"kubernetes.io/projected/40cfea4f-327e-4198-a6ea-b4382d20ba28-kube-api-access-n2tlt\") pod \"ovnkube-control-plane-749d76644c-5kzkz\" (UID: \"40cfea4f-327e-4198-a6ea-b4382d20ba28\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.147076 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/
\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.162891 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.177931 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.179746 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.179823 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.179840 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.179856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.179868 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.193976 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.204891 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.225183 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.237876 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:52Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.282819 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.282850 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.282860 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.282873 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.282882 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.312646 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" Dec 15 06:51:52 crc kubenswrapper[4876]: W1215 06:51:52.329874 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40cfea4f_327e_4198_a6ea_b4382d20ba28.slice/crio-1f6f0bf0714d72df9c206eeb190d8b298152009781ba3807fb53c051da3e0290 WatchSource:0}: Error finding container 1f6f0bf0714d72df9c206eeb190d8b298152009781ba3807fb53c051da3e0290: Status 404 returned error can't find the container with id 1f6f0bf0714d72df9c206eeb190d8b298152009781ba3807fb53c051da3e0290 Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.385844 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.386386 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.386929 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.387244 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.387472 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.490830 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.490860 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.490870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.490885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.490897 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.594639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.594683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.594696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.594713 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.594725 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.698081 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.698171 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.698191 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.698213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.698228 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.705183 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:52 crc kubenswrapper[4876]: E1215 06:51:52.705309 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.705434 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:52 crc kubenswrapper[4876]: E1215 06:51:52.705553 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.705623 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:52 crc kubenswrapper[4876]: E1215 06:51:52.705682 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.800735 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.800786 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.800802 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.800822 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.800835 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.904107 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.904205 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.904228 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.904256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:52 crc kubenswrapper[4876]: I1215 06:51:52.904274 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:52Z","lastTransitionTime":"2025-12-15T06:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.007208 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.007242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.007252 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.007267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.007279 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.048302 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" event={"ID":"40cfea4f-327e-4198-a6ea-b4382d20ba28","Type":"ContainerStarted","Data":"06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.048366 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" event={"ID":"40cfea4f-327e-4198-a6ea-b4382d20ba28","Type":"ContainerStarted","Data":"1f6f0bf0714d72df9c206eeb190d8b298152009781ba3807fb53c051da3e0290"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.110703 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.111052 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.111061 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.111076 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.111091 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.119472 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rzth5"] Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.120274 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.120408 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.121675 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mkcc\" (UniqueName: \"kubernetes.io/projected/daee20e1-a017-4464-9626-ea2c52cfae57-kube-api-access-4mkcc\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.121722 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.139586 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b8
5de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.156344 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.170163 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.192819 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.209634 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.214209 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.214252 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.214266 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.214288 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.214303 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.222853 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mkcc\" (UniqueName: \"kubernetes.io/projected/daee20e1-a017-4464-9626-ea2c52cfae57-kube-api-access-4mkcc\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.222916 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.223054 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.223148 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:53.72311604 +0000 UTC m=+39.294258951 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.226165 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.242761 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mkcc\" (UniqueName: \"kubernetes.io/projected/daee20e1-a017-4464-9626-ea2c52cfae57-kube-api-access-4mkcc\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.244667 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.260247 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.274549 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.299118 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.313050 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.321847 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.321890 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.321903 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.321958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.321975 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.326191 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.340320 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.355152 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.368928 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.381727 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.397867 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.424685 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.424733 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.424747 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.424764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.424775 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.524352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.524694 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.524797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.524881 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.524946 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.540810 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.544947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.544975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.544984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.544998 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.545008 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.562823 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.567281 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.567335 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.567353 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.567377 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.567395 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.580631 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.584850 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.584875 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.584883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.584896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.584906 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.602989 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.606565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.606612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.606630 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.606654 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.606671 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.620549 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:53Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.620694 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.622992 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.623061 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.623074 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.623102 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.623122 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.725748 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.725901 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:53 crc kubenswrapper[4876]: E1215 06:51:53.726019 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:54.725982102 +0000 UTC m=+40.297125013 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.726254 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.726315 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.726331 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.726350 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.726364 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.829337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.829382 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.829395 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.829414 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.829427 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.931426 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.931481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.931522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.931541 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:53 crc kubenswrapper[4876]: I1215 06:51:53.931552 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:53Z","lastTransitionTime":"2025-12-15T06:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.034272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.034320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.034330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.034351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.034364 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.057669 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" event={"ID":"40cfea4f-327e-4198-a6ea-b4382d20ba28","Type":"ContainerStarted","Data":"814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.084544 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.108065 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.129229 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.137268 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.137485 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.137589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.137664 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.137729 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.145847 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.159071 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.171895 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.184815 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.195516 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.207311 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.226284 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc
905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.239593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.239624 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.239633 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.239648 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.239658 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.240941 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.250482 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc 
kubenswrapper[4876]: I1215 06:51:54.268322 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.280512 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.291834 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.303322 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.311199 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.341762 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.341815 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.341834 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.341856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.341873 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.444826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.444870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.444883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.444903 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.444916 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.547972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.548038 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.548060 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.548086 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.548159 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.650650 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.650711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.650733 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.650764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.650787 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.704600 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.704688 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.704770 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.704822 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.704960 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.704996 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.705093 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.705290 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.724164 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.736697 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.736943 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:54 crc kubenswrapper[4876]: E1215 06:51:54.737026 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:51:56.737003618 +0000 UTC m=+42.308146559 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.742227 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.753765 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.753827 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.753846 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.753871 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.753888 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.762061 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf
1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.785316 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.805651 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.821375 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.838189 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.848359 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.859789 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.859879 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.859893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.859913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.859926 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.864175 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.892284 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b
40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.909269 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.926737 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.941585 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.953688 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.963321 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.963360 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.963374 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.963394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.963419 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:54Z","lastTransitionTime":"2025-12-15T06:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.976893 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b8
5de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:54 crc kubenswrapper[4876]: I1215 06:51:54.990485 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:54Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.004026 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:51:55Z is after 2025-08-24T17:21:41Z" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.065848 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.065919 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.065936 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.065957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.065971 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.169256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.169330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.169344 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.169368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.169382 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.272391 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.272454 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.272468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.272488 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.272741 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.376651 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.376691 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.376706 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.376725 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.376737 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.480641 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.480710 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.480732 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.480759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.480783 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.584406 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.584470 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.584489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.584518 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.584538 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.687291 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.687328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.687340 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.687356 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.687367 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.790955 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.791017 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.791034 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.791060 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.791078 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.894001 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.894048 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.894059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.894075 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.894085 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.996792 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.996833 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.996843 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.996858 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:55 crc kubenswrapper[4876]: I1215 06:51:55.996869 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:55Z","lastTransitionTime":"2025-12-15T06:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.099454 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.099496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.099506 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.099521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.099532 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.202528 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.202602 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.202620 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.202681 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.202696 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.305846 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.305952 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.305966 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.305990 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.306006 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.410028 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.410098 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.410135 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.410160 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.410173 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.514297 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.514393 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.514420 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.514452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.514475 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.617456 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.617510 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.617522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.617548 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.617562 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.704917 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.704976 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.705273 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.705363 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.705506 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.705658 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.705727 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.705822 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.720722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.720774 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.720785 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.720810 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.720827 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.761997 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.762313 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:56 crc kubenswrapper[4876]: E1215 06:51:56.762453 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:00.762420202 +0000 UTC m=+46.333563323 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.825262 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.825325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.825343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.825368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.825387 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.928325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.928394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.928435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.928462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:56 crc kubenswrapper[4876]: I1215 06:51:56.928481 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:56Z","lastTransitionTime":"2025-12-15T06:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.032682 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.032812 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.032831 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.032856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.032877 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.136447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.136514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.136539 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.136571 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.136598 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.239099 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.239199 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.239213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.239230 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.239243 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.341972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.342009 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.342019 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.342036 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.342047 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.445366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.445473 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.445498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.445527 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.445548 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.548866 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.549264 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.549454 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.549659 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.549836 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.653728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.653803 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.653829 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.653862 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.653884 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.756827 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.756908 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.756921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.756934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.756944 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.860565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.860639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.860671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.860719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.860741 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.963453 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.963515 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.963531 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.963556 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:57 crc kubenswrapper[4876]: I1215 06:51:57.963579 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:57Z","lastTransitionTime":"2025-12-15T06:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.065942 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.066024 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.066042 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.066057 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.066067 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.169690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.169733 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.169744 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.169759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.169769 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.273413 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.273475 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.273486 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.273503 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.273513 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.376068 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.376108 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.376118 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.376159 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.376173 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.478263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.478326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.478343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.478366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.478385 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.580518 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.580584 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.580597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.580622 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.580637 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.683552 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.683610 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.683619 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.683638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.683651 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.705565 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.705678 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:51:58 crc kubenswrapper[4876]: E1215 06:51:58.705753 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.705598 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.705565 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:51:58 crc kubenswrapper[4876]: E1215 06:51:58.705840 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:51:58 crc kubenswrapper[4876]: E1215 06:51:58.705996 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:51:58 crc kubenswrapper[4876]: E1215 06:51:58.706169 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.786459 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.786505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.786514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.786530 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.786542 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.889367 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.889423 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.889444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.889469 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.889486 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.992570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.992638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.992655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.992679 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:58 crc kubenswrapper[4876]: I1215 06:51:58.992697 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:58Z","lastTransitionTime":"2025-12-15T06:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.095519 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.095569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.095582 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.095604 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.095617 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.198794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.198887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.198915 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.198947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.198972 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.301822 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.301913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.301927 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.301948 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.301963 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.405472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.405534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.405546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.405566 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.405576 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.509077 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.509165 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.509180 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.509200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.509212 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.612286 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.612317 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.612325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.612338 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.612346 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.715787 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.715845 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.715855 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.715877 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.715892 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.818608 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.818684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.818710 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.818744 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.818771 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.921247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.921300 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.921312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.921328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:51:59 crc kubenswrapper[4876]: I1215 06:51:59.921339 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:51:59Z","lastTransitionTime":"2025-12-15T06:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.024678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.024727 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.024737 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.024757 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.024771 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.127130 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.127170 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.127182 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.127200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.127211 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.230954 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.230990 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.231005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.231021 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.231032 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.335102 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.335194 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.335210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.335233 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.335251 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.438472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.438509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.438521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.438576 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.438587 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.481836 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.494439 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.500259 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.520153 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.536163 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.540912 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.540947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc 
kubenswrapper[4876]: I1215 06:52:00.540959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.540976 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.540985 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.551515 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.563477 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.573081 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.585055 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.597841 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.615883 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.635404 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.643489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.643546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.643562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.643577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.643587 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.650102 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.676487 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.694880 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.704861 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.705017 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.704894 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.704859 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.705153 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.704890 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.705434 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.705638 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.709097 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.734497 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.746965 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.747015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.747026 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.747046 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.747425 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.751591 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.766032 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:00Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.804518 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.804814 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:00 crc kubenswrapper[4876]: E1215 06:52:00.804948 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:08.804921592 +0000 UTC m=+54.376064503 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.850487 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.850554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.850573 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.850593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.850606 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.953357 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.953394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.953402 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.953415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:00 crc kubenswrapper[4876]: I1215 06:52:00.953423 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:00Z","lastTransitionTime":"2025-12-15T06:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.055892 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.055934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.055942 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.055958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.055968 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.159392 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.159452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.159462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.159487 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.159500 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.262347 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.262388 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.262400 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.262415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.262428 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.365996 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.366040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.366048 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.366063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.366072 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.469177 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.469254 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.469276 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.469309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.469331 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.572769 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.572840 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.572857 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.572884 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.572903 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.674956 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.675002 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.675013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.675032 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.675046 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.777731 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.777802 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.777826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.777859 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.777884 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.880988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.881073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.881097 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.881171 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.881197 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.984417 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.984469 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.984492 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.984513 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:01 crc kubenswrapper[4876]: I1215 06:52:01.984526 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:01Z","lastTransitionTime":"2025-12-15T06:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.086495 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.086577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.086602 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.086632 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.086656 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.189353 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.189422 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.189440 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.189467 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.189489 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.292636 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.292699 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.292723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.292755 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.292777 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.396975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.397063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.397076 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.397094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.397141 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.500223 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.500312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.500328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.500351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.500372 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.603885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.603942 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.603954 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.603976 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.603995 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.705551 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:02 crc kubenswrapper[4876]: E1215 06:52:02.705961 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.706050 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.706083 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:02 crc kubenswrapper[4876]: E1215 06:52:02.706143 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:02 crc kubenswrapper[4876]: E1215 06:52:02.706286 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.706323 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:02 crc kubenswrapper[4876]: E1215 06:52:02.706421 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.709747 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.709838 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.709862 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.709893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.709916 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.812559 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.812639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.812658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.812681 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.812709 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.915037 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.915084 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.915093 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.915134 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:02 crc kubenswrapper[4876]: I1215 06:52:02.915146 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:02Z","lastTransitionTime":"2025-12-15T06:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.019228 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.019308 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.019318 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.019344 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.019357 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.121413 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.121452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.121466 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.121482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.121494 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.225753 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.225802 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.225814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.225830 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.225842 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.328634 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.328679 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.328687 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.328701 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.328709 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.432047 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.432165 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.432195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.432228 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.432251 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.534947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.535029 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.535052 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.535083 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.535145 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.638738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.638797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.638811 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.638831 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.638844 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.725244 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.725307 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.725329 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.725353 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.725374 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.742876 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:03Z is after 
2025-08-24T17:21:41Z" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.748433 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.748494 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.748505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.748526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.748538 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.768270 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:03Z is after 
2025-08-24T17:21:41Z" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.772768 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.772810 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.772820 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.772837 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.772850 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.791913 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:03Z is after 
2025-08-24T17:21:41Z" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.797923 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.798001 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.798019 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.798043 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.798057 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.816608 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:03Z is after 
2025-08-24T17:21:41Z" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.822670 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.822738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.822754 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.822780 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.822794 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.841682 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:03Z is after 
2025-08-24T17:21:41Z" Dec 15 06:52:03 crc kubenswrapper[4876]: E1215 06:52:03.841833 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.843885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.844023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.844142 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.844241 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.844372 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.947008 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.947053 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.947097 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.947147 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:03 crc kubenswrapper[4876]: I1215 06:52:03.947162 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:03Z","lastTransitionTime":"2025-12-15T06:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.049640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.049687 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.049702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.049722 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.049739 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.152171 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.152247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.152272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.152299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.152319 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.255771 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.255825 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.255842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.255866 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.255886 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.360302 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.360418 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.360435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.360457 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.360474 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.464261 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.464389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.464411 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.464434 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.464457 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.552088 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.552336 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:52:36.552292549 +0000 UTC m=+82.123435500 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.568435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.568498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.568516 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.568542 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.568562 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.653498 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.653600 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.653647 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.653684 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.653831 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.653899 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:36.653879434 +0000 UTC m=+82.225022375 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654225 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654242 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654332 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:36.654310805 +0000 UTC m=+82.225453726 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654252 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654369 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654406 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:36.654396349 +0000 UTC m=+82.225539280 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654427 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654464 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654484 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.654552 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:36.654533922 +0000 UTC m=+82.225676873 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.671256 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.671313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.671325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.671345 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.671357 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.705074 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.705125 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.705135 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.705241 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.705263 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.705350 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.705475 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:04 crc kubenswrapper[4876]: E1215 06:52:04.705613 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.718489 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir
\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.732743 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.756010 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.769858 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.773210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.773326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.773351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.773404 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.773459 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.784856 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.799022 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.814020 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.824539 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.833464 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.846355 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.861425 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.874639 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.875993 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.876055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.876073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.876098 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.876158 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.896607 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp
-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272
e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.911254 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.926309 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.939808 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.950662 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.970650 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:04Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.978300 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.978340 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.978353 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.978370 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:04 crc kubenswrapper[4876]: I1215 06:52:04.978381 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:04Z","lastTransitionTime":"2025-12-15T06:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.081510 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.081546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.081555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.081569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.081580 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.183731 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.183767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.183778 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.183797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.183808 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.286075 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.286182 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.286204 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.286230 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.286247 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.389374 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.389464 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.389501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.389532 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.389555 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.492395 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.492437 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.492452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.492471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.492485 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.595161 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.595246 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.595277 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.595313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.595329 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.698683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.698727 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.698739 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.698757 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.698770 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.705883 4876 scope.go:117] "RemoveContainer" containerID="43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.801755 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.802032 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.802044 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.802059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.802071 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.904338 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.904373 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.904382 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.904399 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:05 crc kubenswrapper[4876]: I1215 06:52:05.904408 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:05Z","lastTransitionTime":"2025-12-15T06:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.006951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.006984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.006992 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.007005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.007014 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.098260 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/1.log" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.100196 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.101157 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.109405 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.109446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.109457 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.109471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.109480 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.119092 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.141203 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.153816 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.174220 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.184874 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.194098 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.211234 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.211266 4876 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.211274 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.211291 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.211300 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.261214 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.277718 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.292570 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.303824 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.312950 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.314040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.314077 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.314087 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.314122 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.314137 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.327295 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.343302 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.358848 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.370842 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.379731 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.389729 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.401396 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:06Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.415982 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.416018 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.416026 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.416040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.416050 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.518987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.519033 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.519051 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.519077 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.519094 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.622580 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.622629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.622647 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.622669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.622686 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.704552 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.704641 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:06 crc kubenswrapper[4876]: E1215 06:52:06.704756 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.704872 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.704916 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:06 crc kubenswrapper[4876]: E1215 06:52:06.705020 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:06 crc kubenswrapper[4876]: E1215 06:52:06.705160 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:06 crc kubenswrapper[4876]: E1215 06:52:06.705297 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.725410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.725468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.725486 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.725511 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.725530 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.828276 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.828310 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.828319 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.828333 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.828343 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.930371 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.930410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.930418 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.930431 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:06 crc kubenswrapper[4876]: I1215 06:52:06.930441 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:06Z","lastTransitionTime":"2025-12-15T06:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.033432 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.033472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.033482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.033498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.033511 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.106764 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/2.log" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.107456 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/1.log" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.110543 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" exitCode=1 Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.110612 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.110680 4876 scope.go:117] "RemoveContainer" containerID="43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.111612 4876 scope.go:117] "RemoveContainer" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" Dec 15 06:52:07 crc kubenswrapper[4876]: E1215 06:52:07.111858 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.129189 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.135783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.135861 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.135890 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.135921 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.135944 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.149441 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.166855 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.186870 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.208603 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.224060 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.238991 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.239036 4876 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.239047 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.239063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.239076 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.240208 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.253358 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.266172 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.282710 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.299702 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.322603 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.338058 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.342439 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.342485 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.342504 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.342526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.342544 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.351949 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.378856 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://43c180262bed5975775d28a7e1a8a9da781a48b85de7ec6e7a9840ef39052bc5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:51:49Z\\\",\\\"message\\\":\\\" (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853277 6324 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853369 6324 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853452 6324 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.853523 6324 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1215 06:51:48.853576 6324 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1215 06:51:48.854184 6324 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1215 06:51:48.854215 6324 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1215 06:51:48.854244 6324 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1215 06:51:48.854263 6324 factory.go:656] Stopping watch factory\\\\nI1215 06:51:48.854289 6324 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 
obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.392811 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.407472 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.427544 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:07Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.445169 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.445197 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.445210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.445226 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.445237 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.548704 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.548751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.548764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.548783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.548798 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.656023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.656658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.656684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.656708 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.656725 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.759514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.759563 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.759576 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.759595 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.759607 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.862210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.862295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.862322 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.862352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.862375 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.965694 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.966023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.966147 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.966260 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:07 crc kubenswrapper[4876]: I1215 06:52:07.966340 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:07Z","lastTransitionTime":"2025-12-15T06:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.068892 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.068958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.068975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.068999 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.069030 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.115686 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/2.log" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.120601 4876 scope.go:117] "RemoveContainer" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.120869 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.144653 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.170032 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.172248 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.172295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.172309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.172336 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.172351 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.188596 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.201484 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.225418 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.243756 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.258620 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.275489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.275536 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.275548 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.275572 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.275592 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.289549 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.303051 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.319213 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.333366 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.348544 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.365866 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.378640 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.378842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.378934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.379060 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.379168 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.383198 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.398431 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.413399 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.424503 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.438907 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:08Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.482413 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.482466 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.482479 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.482497 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.482509 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.586013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.586384 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.586489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.586577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.586658 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.689702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.689783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.689836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.689867 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.689894 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.705299 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.705381 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.705314 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.705517 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.705648 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.705765 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.705848 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.705930 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.793496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.793554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.793569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.793593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.793609 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.896514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.896562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.896577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.896598 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.896612 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.902227 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.902424 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:08 crc kubenswrapper[4876]: E1215 06:52:08.902554 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:24.902520098 +0000 UTC m=+70.473663049 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.999674 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.999712 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.999724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:08 crc kubenswrapper[4876]: I1215 06:52:08.999741 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:08.999753 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:08Z","lastTransitionTime":"2025-12-15T06:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.107768 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.107825 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.107845 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.107888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.107911 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.211181 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.211249 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.211267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.211290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.211307 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.316028 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.316373 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.316404 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.316436 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.316457 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.419381 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.419442 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.419460 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.419485 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.419501 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.523020 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.523098 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.523153 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.523178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.523197 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.626599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.626649 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.626663 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.626682 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.626695 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.730083 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.730176 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.730191 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.730210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.730237 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.832856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.833327 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.833337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.833351 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.833360 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.936534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.936935 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.937456 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.937855 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:09 crc kubenswrapper[4876]: I1215 06:52:09.938457 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:09Z","lastTransitionTime":"2025-12-15T06:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.042750 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.042821 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.042839 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.042864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.042884 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.145927 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.145995 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.146015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.146055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.146092 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.249901 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.249987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.250022 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.250052 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.250072 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.353612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.353667 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.353686 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.353759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.353793 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.457044 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.457099 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.457151 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.457174 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.457191 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.560808 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.560877 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.560901 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.560932 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.560957 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.664368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.664447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.664471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.664496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.664516 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.705429 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.705516 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.705571 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.705665 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:10 crc kubenswrapper[4876]: E1215 06:52:10.705655 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:10 crc kubenswrapper[4876]: E1215 06:52:10.705769 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:10 crc kubenswrapper[4876]: E1215 06:52:10.705901 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:10 crc kubenswrapper[4876]: E1215 06:52:10.706169 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.767809 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.767891 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.767918 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.767957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.767982 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.870687 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.870738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.870758 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.870773 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.870783 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.973278 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.973336 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.973350 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.973372 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:10 crc kubenswrapper[4876]: I1215 06:52:10.973387 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:10Z","lastTransitionTime":"2025-12-15T06:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.076682 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.077150 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.077339 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.077553 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.077754 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.180575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.180619 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.180635 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.180652 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.180662 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.283082 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.283203 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.283263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.283296 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.283320 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.387029 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.387329 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.387457 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.387555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.387647 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.490651 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.490684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.490692 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.490704 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.490713 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.595702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.595767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.595780 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.595800 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.595818 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.699028 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.699094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.699157 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.699189 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.699212 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.802066 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.802179 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.802210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.802238 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.802257 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.905371 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.905437 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.905461 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.905485 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:11 crc kubenswrapper[4876]: I1215 06:52:11.905502 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:11Z","lastTransitionTime":"2025-12-15T06:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.008069 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.008166 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.008186 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.008213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.008230 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.111931 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.111986 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.111999 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.112015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.112026 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.215165 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.215211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.215220 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.215237 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.215269 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.318959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.319005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.319020 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.319037 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.319050 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.421195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.421460 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.421526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.421633 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.421789 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.524652 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.524748 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.524770 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.524800 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.524821 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.627896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.627952 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.627965 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.627979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.627990 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.705482 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.705530 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:12 crc kubenswrapper[4876]: E1215 06:52:12.705654 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.705712 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:12 crc kubenswrapper[4876]: E1215 06:52:12.705809 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.705918 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:12 crc kubenswrapper[4876]: E1215 06:52:12.706013 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:12 crc kubenswrapper[4876]: E1215 06:52:12.706135 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.730885 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.730944 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.730960 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.730979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.730994 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.834693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.834732 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.834745 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.834761 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.834773 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.937866 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.937936 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.937960 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.937989 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:12 crc kubenswrapper[4876]: I1215 06:52:12.938010 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:12Z","lastTransitionTime":"2025-12-15T06:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.041534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.041592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.041609 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.041632 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.041649 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.144899 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.144969 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.144987 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.145010 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.145027 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.248883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.248959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.248984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.249015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.249039 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.352669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.352730 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.352758 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.352790 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.352814 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.455859 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.456039 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.456059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.456096 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.456148 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.559372 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.559427 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.559445 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.559468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.559483 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.662288 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.662363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.662381 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.662405 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.662422 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.765943 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.766003 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.766016 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.766032 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.766045 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.869492 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.869565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.869594 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.869622 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.869644 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.953574 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.953696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.953724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.953758 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.953779 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:13 crc kubenswrapper[4876]: E1215 06:52:13.982386 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:13Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.988850 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.988895 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.988908 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.988927 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:13 crc kubenswrapper[4876]: I1215 06:52:13.988940 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:13Z","lastTransitionTime":"2025-12-15T06:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.005154 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.009235 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.009292 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.009313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.009344 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.009365 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.026837 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.032273 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.032331 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.032342 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.032364 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.032378 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.052415 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.058211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.058272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.058290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.058316 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.058335 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.077968 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.078255 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.080533 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.080588 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.080599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.080622 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.080634 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.183540 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.183594 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.183612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.183639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.183660 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.286581 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.286635 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.286653 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.286677 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.286695 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.389759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.389827 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.389848 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.389877 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.389897 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.493565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.494012 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.494142 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.494255 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.494357 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.597923 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.598023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.598043 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.598075 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.598157 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.701375 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.701446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.701468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.701498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.701525 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.707790 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.708004 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.708183 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.708288 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.708296 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.708597 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.708979 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:14 crc kubenswrapper[4876]: E1215 06:52:14.710763 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.732288 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.751707 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.769466 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.787014 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.801337 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.804270 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.804336 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc 
kubenswrapper[4876]: I1215 06:52:14.804377 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.804399 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.804411 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.816833 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 
15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.831365 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.850238 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.865814 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.880560 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.896785 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.907291 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.907366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.907393 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.907427 4876 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.907451 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:14Z","lastTransitionTime":"2025-12-15T06:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.921965 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56
e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.939875 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.952934 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:14 crc kubenswrapper[4876]: I1215 06:52:14.982420 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:14Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.002173 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:15Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.009587 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.009628 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.009647 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.009668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.009685 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.020753 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:15Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.038239 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:15Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.117488 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.117597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.117657 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.117720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.117751 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.221561 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.221593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.221601 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.221648 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.221674 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.324590 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.324631 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.324643 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.324659 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.324671 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.427135 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.427201 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.427213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.427228 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.427241 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.529493 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.529521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.529529 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.529543 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.529558 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.632393 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.632448 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.632465 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.632489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.632506 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.736474 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.736614 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.736634 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.736663 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.736685 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.838971 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.839039 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.839057 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.839081 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.839098 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.942435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.942495 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.942510 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.942530 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:15 crc kubenswrapper[4876]: I1215 06:52:15.942546 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:15Z","lastTransitionTime":"2025-12-15T06:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.046371 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.046433 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.046451 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.046479 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.046497 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.148969 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.149035 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.149055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.149081 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.149130 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.252521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.252572 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.252590 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.252612 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.252634 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.355501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.355553 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.355571 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.355596 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.355612 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.459015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.459414 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.459558 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.459711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.459891 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.563190 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.563257 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.563275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.563300 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.563317 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.666749 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.666845 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.666863 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.666887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.666907 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.705326 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.705326 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.705359 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:16 crc kubenswrapper[4876]: E1215 06:52:16.706046 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:16 crc kubenswrapper[4876]: E1215 06:52:16.706215 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.705413 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:16 crc kubenswrapper[4876]: E1215 06:52:16.706422 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:16 crc kubenswrapper[4876]: E1215 06:52:16.705890 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.769402 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.769488 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.769509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.769535 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.769555 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.871855 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.871900 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.871913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.871929 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.871941 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.975297 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.975368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.975389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.975420 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:16 crc kubenswrapper[4876]: I1215 06:52:16.975442 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:16Z","lastTransitionTime":"2025-12-15T06:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.079430 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.079563 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.079587 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.079616 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.079637 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.183092 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.183491 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.183691 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.183866 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.184033 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.288224 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.288286 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.288306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.288329 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.288347 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.391800 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.392163 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.392271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.392352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.392461 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.495924 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.496276 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.496405 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.496535 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.496655 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.600030 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.600349 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.600420 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.600481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.600549 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.704231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.704294 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.704309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.704324 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.704340 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.807804 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.808293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.808441 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.808486 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.808501 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.911187 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.911223 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.911239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.911259 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:17 crc kubenswrapper[4876]: I1215 06:52:17.911274 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:17Z","lastTransitionTime":"2025-12-15T06:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.014136 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.014183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.014218 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.014238 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.014255 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.117950 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.118041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.118059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.118528 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.118590 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.222083 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.222178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.222192 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.222209 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.222223 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.324512 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.324838 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.324924 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.325015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.325136 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.427864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.427909 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.427919 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.427940 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.427952 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.530668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.530710 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.530719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.530733 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.530744 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.632746 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.632810 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.632826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.632852 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.632887 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.704835 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.704835 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.704864 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:18 crc kubenswrapper[4876]: E1215 06:52:18.705169 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:18 crc kubenswrapper[4876]: E1215 06:52:18.705234 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.705300 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:18 crc kubenswrapper[4876]: E1215 06:52:18.705327 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:18 crc kubenswrapper[4876]: E1215 06:52:18.705497 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.735725 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.735798 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.735815 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.735842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.735860 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.838668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.838729 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.838738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.838756 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.838766 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.941320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.941353 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.941362 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.941376 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:18 crc kubenswrapper[4876]: I1215 06:52:18.941387 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:18Z","lastTransitionTime":"2025-12-15T06:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.043144 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.043240 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.043252 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.043283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.043300 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.147306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.147397 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.147416 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.147438 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.147487 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.254283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.254328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.254343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.254370 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.254385 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.356543 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.356592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.356603 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.356615 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.356623 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.459014 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.459059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.459073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.459089 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.459100 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.561606 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.561643 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.561652 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.561666 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.561675 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.664242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.664276 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.664285 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.664299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.664307 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.767259 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.767295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.767305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.767321 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.767333 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.870530 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.870562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.870574 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.870588 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.870600 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.973476 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.973536 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.973549 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.973566 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:19 crc kubenswrapper[4876]: I1215 06:52:19.973602 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:19Z","lastTransitionTime":"2025-12-15T06:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.076762 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.076822 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.076843 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.076877 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.076912 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.179218 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.179247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.179257 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.179274 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.179285 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.282599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.282639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.282648 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.282662 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.282673 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.385330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.385377 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.385391 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.385412 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.385425 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.488095 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.488151 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.488161 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.488178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.488189 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.591096 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.591163 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.591176 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.591206 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.591220 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.694219 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.694268 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.694285 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.694306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.694322 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.704625 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.704674 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:20 crc kubenswrapper[4876]: E1215 06:52:20.704799 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.704625 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:20 crc kubenswrapper[4876]: E1215 06:52:20.705067 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:20 crc kubenswrapper[4876]: E1215 06:52:20.705188 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.705278 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:20 crc kubenswrapper[4876]: E1215 06:52:20.705517 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.797633 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.797682 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.797696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.797716 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.797726 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.900128 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.900175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.900187 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.900207 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:20 crc kubenswrapper[4876]: I1215 06:52:20.900220 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:20Z","lastTransitionTime":"2025-12-15T06:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.003355 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.003818 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.003836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.003854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.003866 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.106183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.106215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.106225 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.106239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.106250 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.208869 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.208906 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.208918 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.208935 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.208947 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.311718 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.311774 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.311783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.311798 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.311807 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.413724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.413765 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.413774 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.413788 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.413797 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.515672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.515719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.515734 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.515789 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.515803 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.618511 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.618600 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.618615 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.618630 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.618641 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.722668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.723143 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.723247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.723323 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.723434 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.826538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.826584 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.826594 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.826613 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.826624 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.929675 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.929728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.929739 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.929758 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:21 crc kubenswrapper[4876]: I1215 06:52:21.929776 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:21Z","lastTransitionTime":"2025-12-15T06:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.033477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.033530 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.033544 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.033564 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.033579 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.137320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.137391 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.137414 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.137444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.137467 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.241173 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.241227 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.241242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.241266 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.241282 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.344951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.345013 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.345024 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.345043 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.345054 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.447856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.447905 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.447918 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.447934 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.447946 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.550805 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.551450 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.551465 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.551483 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.551495 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.654559 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.654613 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.654623 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.654642 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.654654 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.704879 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.704964 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:22 crc kubenswrapper[4876]: E1215 06:52:22.705251 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.705313 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:22 crc kubenswrapper[4876]: E1215 06:52:22.706043 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:22 crc kubenswrapper[4876]: E1215 06:52:22.706170 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.705947 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:22 crc kubenswrapper[4876]: E1215 06:52:22.706291 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.706649 4876 scope.go:117] "RemoveContainer" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" Dec 15 06:52:22 crc kubenswrapper[4876]: E1215 06:52:22.707028 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.757454 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.757520 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.757536 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.757555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.757570 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.860482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.860538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.860550 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.860567 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.860580 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.963972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.964037 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.964055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.964082 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:22 crc kubenswrapper[4876]: I1215 06:52:22.964128 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:22Z","lastTransitionTime":"2025-12-15T06:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.067506 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.067574 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.067599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.067625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.067644 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.170378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.170629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.170647 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.170671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.170688 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.275100 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.275218 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.275244 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.275280 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.275304 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.377671 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.377711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.377745 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.377762 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.377774 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.479948 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.479993 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.480005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.480022 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.480034 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.582183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.582219 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.582230 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.582244 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.582255 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.683672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.683702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.683711 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.683724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.683734 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.785693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.785733 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.785744 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.785762 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.785775 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.888783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.888830 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.888840 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.888857 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.888867 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.990910 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.990971 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.990982 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.991002 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:23 crc kubenswrapper[4876]: I1215 06:52:23.991013 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:23Z","lastTransitionTime":"2025-12-15T06:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.093633 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.093705 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.093730 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.093759 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.093783 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.196385 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.196453 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.196472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.196498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.196517 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.299364 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.299815 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.299920 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.300011 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.300082 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.403309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.403352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.403363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.403383 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.403394 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.409162 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.409183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.409194 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.409208 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.409239 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.422218 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.426459 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.426501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.426513 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.426541 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.426552 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.439764 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.443600 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.443814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.443874 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.443957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.444022 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.455848 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.459361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.459385 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.459395 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.459410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.459421 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.469620 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.473501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.473620 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.473692 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.473786 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.473870 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.485064 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.485254 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.505896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.505945 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.505957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.505974 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.505991 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.608299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.608348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.608365 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.608387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.608404 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.704588 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.704665 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.704726 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.704744 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.704878 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.704928 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.705222 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.705295 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.710271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.710306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.710318 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.710333 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.710350 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.723345 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.753763 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.772580 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.797667 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.813725 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.813767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.813779 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.813797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.813808 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.821863 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.836711 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.863072 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.880468 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.892233 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.905809 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.916394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.916422 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.916433 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.916447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.916458 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:24Z","lastTransitionTime":"2025-12-15T06:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.918699 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.937181 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.947434 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.959948 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.971537 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.985043 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.997277 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:24Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:24 crc kubenswrapper[4876]: I1215 06:52:24.998685 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.998943 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:24 crc kubenswrapper[4876]: E1215 06:52:24.999086 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:52:56.99906756 +0000 UTC m=+102.570210481 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.008677 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:25Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.019349 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.019378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.019387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 
06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.019400 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.019410 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.121554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.121794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.121851 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.121914 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.121988 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.224265 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.224320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.224331 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.224349 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.224361 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.327814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.327958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.328022 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.328089 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.328190 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.430964 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.431244 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.431357 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.431427 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.431494 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.533736 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.533814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.533841 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.533867 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.533886 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.637185 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.637233 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.637246 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.637263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.637279 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.740534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.740587 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.740599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.740618 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.740631 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.844215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.844327 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.844340 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.844361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.844374 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.947562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.947625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.947644 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.947672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:25 crc kubenswrapper[4876]: I1215 06:52:25.947700 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:25Z","lastTransitionTime":"2025-12-15T06:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.051075 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.051145 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.051164 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.051184 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.051200 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.155413 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.155473 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.155496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.155521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.155539 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.183820 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/0.log" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.184157 4876 generic.go:334] "Generic (PLEG): container finished" podID="d2c0440d-a8eb-4f51-8626-c3bb9d1b0867" containerID="522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562" exitCode=1 Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.184226 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerDied","Data":"522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.184968 4876 scope.go:117] "RemoveContainer" containerID="522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.200503 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.215465 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.239836 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.256878 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.258364 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.258418 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.258438 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.258462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.258481 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.273029 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.288996 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.300719 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.322229 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.339371 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.358155 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.361295 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.361360 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.361380 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.361404 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.361421 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.375947 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.391694 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.411788 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.431414 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.454171 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.468083 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.468165 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.468185 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.468209 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.468226 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.472774 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.486830 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.502160 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:26Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.571156 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.571189 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.571200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.571216 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.571227 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.673678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.673715 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.673724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.673740 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.673751 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.705751 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.705795 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.705834 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.705771 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:26 crc kubenswrapper[4876]: E1215 06:52:26.705959 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:26 crc kubenswrapper[4876]: E1215 06:52:26.706027 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:26 crc kubenswrapper[4876]: E1215 06:52:26.706143 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:26 crc kubenswrapper[4876]: E1215 06:52:26.706209 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.776528 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.776575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.776588 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.776604 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.776616 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.880231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.880299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.880320 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.880347 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.880367 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.983035 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.983082 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.983094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.983134 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:26 crc kubenswrapper[4876]: I1215 06:52:26.983144 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:26Z","lastTransitionTime":"2025-12-15T06:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.085173 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.085219 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.085231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.085250 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.085265 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.187410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.187450 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.187462 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.187476 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.187487 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.190736 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/0.log" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.190900 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerStarted","Data":"112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.206841 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.221691 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.232825 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.244469 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.263034 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.274218 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.283093 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.289959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.290011 4876 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.290023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.290041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.290053 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.304338 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.317698 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.336433 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\
\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\"
,\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.350615 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.363735 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.378927 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.393195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.393262 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.393280 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.393305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.393322 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.394055 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.410623 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.425498 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.436854 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.451187 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:27Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.496860 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.496922 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.496944 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.496972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.496995 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.608409 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.608493 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.608515 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.608546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.608564 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.711660 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.711737 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.711761 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.711787 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.711807 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.815275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.815642 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.815720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.815787 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.815867 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.918925 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.919204 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.919237 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.919261 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:27 crc kubenswrapper[4876]: I1215 06:52:27.919276 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:27Z","lastTransitionTime":"2025-12-15T06:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.021486 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.021551 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.021570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.021592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.021611 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.123819 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.123920 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.123947 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.124016 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.124040 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.227451 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.227526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.227547 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.227578 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.227602 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.331023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.331067 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.331120 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.331147 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.331162 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.435238 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.435284 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.435310 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.435331 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.435345 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.539415 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.539487 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.539500 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.539522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.539537 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.643374 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.643699 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.643794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.643903 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.643993 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.705503 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.705679 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:28 crc kubenswrapper[4876]: E1215 06:52:28.705845 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.705923 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.706052 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:28 crc kubenswrapper[4876]: E1215 06:52:28.706174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:28 crc kubenswrapper[4876]: E1215 06:52:28.706319 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:28 crc kubenswrapper[4876]: E1215 06:52:28.706414 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.747413 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.747481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.747499 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.747525 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.747546 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.850318 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.850369 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.850384 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.850406 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.850422 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.954664 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.955075 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.955330 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.955532 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:28 crc kubenswrapper[4876]: I1215 06:52:28.955683 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:28Z","lastTransitionTime":"2025-12-15T06:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.059177 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.060293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.060463 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.060649 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.060799 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.164456 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.164521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.164534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.164554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.164569 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.267175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.267445 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.267542 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.267689 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.267778 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.370826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.370889 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.370906 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.370930 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.370947 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.474025 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.474060 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.474072 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.474087 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.474095 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.577209 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.577263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.577275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.577299 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.577314 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.680551 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.680620 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.680641 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.680669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.680685 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.784444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.784504 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.784523 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.784552 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.784568 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.887373 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.887429 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.887447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.887471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.887490 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.991193 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.991272 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.991290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.991322 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:29 crc kubenswrapper[4876]: I1215 06:52:29.991345 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:29Z","lastTransitionTime":"2025-12-15T06:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.095574 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.095638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.095651 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.095678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.095692 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.199646 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.199738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.199753 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.199777 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.199790 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.303495 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.303573 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.303598 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.303629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.303651 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.407751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.408222 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.408401 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.408592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.408753 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.512834 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.512914 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.512939 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.512972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.512998 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.616662 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.616778 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.616802 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.616866 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.616884 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.705424 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.705520 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.705594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.705824 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:30 crc kubenswrapper[4876]: E1215 06:52:30.705819 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:30 crc kubenswrapper[4876]: E1215 06:52:30.706036 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:30 crc kubenswrapper[4876]: E1215 06:52:30.706223 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:30 crc kubenswrapper[4876]: E1215 06:52:30.706335 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.720220 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.720290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.720317 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.720346 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.720367 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.823597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.823696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.823720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.823748 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.823769 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.927064 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.927192 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.927220 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.927283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:30 crc kubenswrapper[4876]: I1215 06:52:30.927301 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:30Z","lastTransitionTime":"2025-12-15T06:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.030246 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.030336 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.030370 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.030400 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.030420 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.133775 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.133828 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.133838 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.133856 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.133869 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.236919 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.236979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.236997 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.237019 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.237036 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.341296 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.341361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.341384 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.341418 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.341441 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.444953 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.445002 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.445020 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.445041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.445060 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.547666 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.547715 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.547723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.547741 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.547751 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.651689 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.651747 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.651767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.651795 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.651818 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.720812 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.755684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.755736 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.755748 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.755765 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.755780 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.858760 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.858816 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.858833 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.858855 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.858872 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.962005 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.962073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.962096 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.962168 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:31 crc kubenswrapper[4876]: I1215 06:52:31.962195 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:31Z","lastTransitionTime":"2025-12-15T06:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.065453 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.065544 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.065575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.065609 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.065640 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.169045 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.169137 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.169157 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.169180 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.169197 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.272438 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.272506 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.272531 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.272558 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.272581 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.376864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.376933 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.376958 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.376988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.377014 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.480849 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.480957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.480983 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.481015 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.481043 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.583954 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.584016 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.584037 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.584063 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.584079 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.687072 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.687185 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.687208 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.687242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.687265 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.704777 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.704817 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.704779 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.704880 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:32 crc kubenswrapper[4876]: E1215 06:52:32.705004 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:32 crc kubenswrapper[4876]: E1215 06:52:32.705200 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:32 crc kubenswrapper[4876]: E1215 06:52:32.705341 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:32 crc kubenswrapper[4876]: E1215 06:52:32.705529 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.798980 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.799059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.799079 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.799144 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.799165 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.903449 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.904215 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.904251 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.904276 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:32 crc kubenswrapper[4876]: I1215 06:52:32.904294 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:32Z","lastTransitionTime":"2025-12-15T06:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.007538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.007602 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.007625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.007655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.007677 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.111366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.111447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.111467 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.111494 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.111514 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.214205 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.214281 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.214298 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.214323 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.214339 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.317741 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.317873 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.317899 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.317932 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.317961 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.421469 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.421537 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.421565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.421596 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.421620 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.523883 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.523951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.523979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.524008 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.524035 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.628011 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.628157 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.628187 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.628214 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.628232 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.730959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.731033 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.731154 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.731196 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.731221 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.834477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.834544 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.834554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.834577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.834591 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.938794 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.938869 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.938887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.938915 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:33 crc kubenswrapper[4876]: I1215 06:52:33.938935 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:33Z","lastTransitionTime":"2025-12-15T06:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.042275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.042348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.042365 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.042390 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.042408 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.146293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.146366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.146385 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.146410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.146428 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.248854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.248902 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.248914 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.248931 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.248942 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.352229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.352271 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.352279 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.352294 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.352304 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.455829 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.455915 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.455939 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.455972 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.455990 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.559464 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.559525 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.559537 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.559560 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.559572 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.662217 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.662750 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.662763 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.662784 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.662800 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.704789 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.704935 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.705036 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.705068 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.704980 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.705192 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.705297 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.705401 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.737276 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.753403 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.766118 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.766183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.766219 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.766242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.766256 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.767202 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.807201 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.826922 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854240 4876 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f
73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854564 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854594 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.854612 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.867969 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.871270 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.876050 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.876148 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.876168 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.876191 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.876208 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.883774 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.896380 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.899863 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881
c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.901507 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.901567 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.901589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.901619 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.901645 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.911590 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.922635 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a
6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.927536 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.927587 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.927607 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.927628 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.927643 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.930534 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]
,\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"
finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067
4616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.941687 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.946816 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.946871 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.946887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.946909 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.946925 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.947765 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.960425 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.964835 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: E1215 06:52:34.965001 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.967231 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.967268 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.967280 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.967301 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.967316 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:34Z","lastTransitionTime":"2025-12-15T06:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.972641 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed406b28-937f-4a6a-b39d-a876d1c75d4a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.986767 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:34 crc kubenswrapper[4876]: I1215 06:52:34.999318 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:34Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.014369 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.027367 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.039147 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:35Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.070247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.070297 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.070313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.070335 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.070351 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.173352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.173423 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.173441 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.173466 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.173486 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.276840 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.276890 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.276902 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.276922 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.276936 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.380534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.380592 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.380607 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.380626 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.380642 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.483610 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.483677 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.483695 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.483720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.483739 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.587447 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.587506 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.587527 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.587554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.587575 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.690718 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.690793 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.690809 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.690833 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.690845 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.706553 4876 scope.go:117] "RemoveContainer" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.794292 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.794352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.794372 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.794398 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.794417 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.897526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.897564 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.897573 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.897591 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:35 crc kubenswrapper[4876]: I1215 06:52:35.897601 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:35Z","lastTransitionTime":"2025-12-15T06:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.000458 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.000525 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.000546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.000570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.000586 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.102887 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.102937 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.102956 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.102980 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.102995 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.205789 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.205842 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.205857 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.205876 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.205937 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.227829 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/2.log" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.231188 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.231592 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.245191 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.269123 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.308870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.308917 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.308927 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.308941 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.308950 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.319563 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.341616 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.355158 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.370293 4876 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f
73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.384736 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.394912 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.411663 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.411751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.411773 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.411800 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.411814 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.418020 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d0
8f1a3e232e1df5c8ffe20e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.433089 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.448884 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.467301 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.493299 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.507982 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed406b28-937f-4a6a-b39d-a876d1c75d4a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":
\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.514346 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.514398 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.514410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.514430 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.514444 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.522730 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.537062 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.551842 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.565699 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.580923 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:36Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.616857 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.616908 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.616922 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.616948 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.616963 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.640830 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.641137 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.641068754 +0000 UTC m=+146.212211675 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.705361 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.705420 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.705487 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.705624 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.705674 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.706230 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.705820 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.705933 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.719025 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.719080 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.719093 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.719142 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.719155 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.742032 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.742158 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.742214 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.742287 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:36 crc 
kubenswrapper[4876]: E1215 06:52:36.742316 4876 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742431 4876 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742469 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742482 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.742440993 +0000 UTC m=+146.313584084 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742501 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742499 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742522 4876 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742545 4876 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742551 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.742526705 +0000 UTC m=+146.313669616 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742564 4876 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742607 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.742582797 +0000 UTC m=+146.313725748 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:36 crc kubenswrapper[4876]: E1215 06:52:36.742637 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.742622548 +0000 UTC m=+146.313765489 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.821913 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.821957 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.821967 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.821984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.821994 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.924916 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.924953 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.924966 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.924985 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:36 crc kubenswrapper[4876]: I1215 06:52:36.924999 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:36Z","lastTransitionTime":"2025-12-15T06:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.028602 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.028652 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.028669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.028694 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.028712 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.131896 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.132012 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.132032 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.132060 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.132077 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.234452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.234513 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.234531 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.234555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.234574 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.237718 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/3.log" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.238463 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/2.log" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.242217 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" exitCode=1 Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.242298 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.242359 4876 scope.go:117] "RemoveContainer" containerID="d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.243879 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:52:37 crc kubenswrapper[4876]: E1215 06:52:37.244266 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.268082 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.281657 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.301531 4876 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.322439 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.338023 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.338073 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.338091 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.338127 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.338140 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.341436 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.361560 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.388358 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.413062 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.440737 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.440787 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.440803 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.440826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.440840 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.449413 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed406b28-937f-4a6a-b39d-a876d1c75d4a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.470957 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.489774 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.502838 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.516178 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.525561 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.541466 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d89d0a1ddedf6e9234ce592a0a96ed5e36d52d56e41c1360992604a5cbd3119f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:06Z\\\",\\\"message\\\":\\\"d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568803 6542 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nI1215 06:52:06.568825 6542 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1215 06:52:06.568819 6542 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-cluster-version/cluster-version-operator]} name:Service_openshift-cluster-version/cluster-version-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.182:9099:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61d39e4d-21a9-4387-9a2b-fa4ad14792e2}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1215 06:52:06.568778 6542 services_controller.go:356] Processing sync for service openshift-machine-config-operator/machine-config-controller for network=default\\\\nF1215 06:52:06.568931 6542 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:36Z\\\",\\\"message\\\":\\\"ary/ingress-canary for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1215 06:52:36.735752 6980 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-zdprc\\\\nI1215 06:52:36.736445 6980 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-zdprc in node crc\\\\nI1215 06:52:36.736419 6980 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, 
Opts:ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef
0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.542708 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.542847 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.542940 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.543041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.543134 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.556420 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.570455 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc 
kubenswrapper[4876]: I1215 06:52:37.588656 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.602802 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:37Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.645675 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.645979 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.646062 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.646178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.646256 4876 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.749839 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.750290 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.750497 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.750684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.750859 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.855166 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.855236 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.855247 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.855275 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.855293 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.958537 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.958589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.958604 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.958630 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:37 crc kubenswrapper[4876]: I1215 06:52:37.958645 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:37Z","lastTransitionTime":"2025-12-15T06:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.061819 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.061871 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.061888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.061914 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.061930 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.165575 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.166165 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.166362 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.167004 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.167212 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.254269 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/3.log" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.259406 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:52:38 crc kubenswrapper[4876]: E1215 06:52:38.259556 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.270229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.270297 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.270311 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.270338 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.270354 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.281294 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.297085 4876 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 
06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.312639 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.331716 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.349793 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.369333 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.374081 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.374147 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.374164 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.374183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.374194 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.384768 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.396504 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.408092 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed406b28-937f-4a6a-b39d-a876d1c75d4a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.422502 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.435642 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.448452 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.459354 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.469138 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.476937 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.476968 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.476976 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.476990 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.476999 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.485756 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d0
8f1a3e232e1df5c8ffe20e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:36Z\\\",\\\"message\\\":\\\"ary/ingress-canary for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1215 06:52:36.735752 6980 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-zdprc\\\\nI1215 06:52:36.736445 6980 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-zdprc in node crc\\\\nI1215 06:52:36.736419 6980 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.495494 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.503739 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.522143 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e
936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.535164 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd33
4b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:38Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.579253 4876 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.579476 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.579538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.579638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.579703 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.683470 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.683893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.684062 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.684328 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.684523 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.705286 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:38 crc kubenswrapper[4876]: E1215 06:52:38.705445 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.705702 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.705711 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.705991 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:38 crc kubenswrapper[4876]: E1215 06:52:38.705931 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:38 crc kubenswrapper[4876]: E1215 06:52:38.706078 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:38 crc kubenswrapper[4876]: E1215 06:52:38.706174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.788055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.788152 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.788176 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.788200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.788218 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.890501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.890541 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.890552 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.890570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.890581 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.994251 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.994322 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.994347 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.994408 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:38 crc kubenswrapper[4876]: I1215 06:52:38.994429 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:38Z","lastTransitionTime":"2025-12-15T06:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.097728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.097830 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.097850 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.097876 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.097895 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.201041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.201152 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.201180 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.201213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.201238 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.304514 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.304568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.304589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.304614 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.304630 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.407693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.407769 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.407809 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.407843 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.407866 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.511779 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.511854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.511879 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.511915 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.511938 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.615208 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.615265 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.615283 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.615309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.615329 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.718264 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.718337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.718352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.718379 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.718394 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.821976 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.822034 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.822061 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.822093 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.822165 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.925693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.925739 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.925756 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.925780 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:39 crc kubenswrapper[4876]: I1215 06:52:39.925799 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:39Z","lastTransitionTime":"2025-12-15T06:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.029781 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.029834 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.029850 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.029875 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.029891 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.132688 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.132751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.132780 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.132810 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.132828 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.236170 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.236251 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.236267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.236291 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.236305 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.340226 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.340293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.340355 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.340380 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.340399 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.442904 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.442975 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.442995 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.443022 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.443041 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.546667 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.546726 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.546744 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.546771 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.546790 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.649984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.650041 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.650059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.650082 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.650099 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.705305 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.705338 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.705502 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:40 crc kubenswrapper[4876]: E1215 06:52:40.705646 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:40 crc kubenswrapper[4876]: E1215 06:52:40.705809 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.706173 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:40 crc kubenswrapper[4876]: E1215 06:52:40.706256 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:40 crc kubenswrapper[4876]: E1215 06:52:40.706100 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.752925 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.752968 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.752978 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.752993 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.753005 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.856370 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.856445 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.856465 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.856491 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.856511 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.958814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.958868 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.958888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.958909 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:40 crc kubenswrapper[4876]: I1215 06:52:40.958922 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:40Z","lastTransitionTime":"2025-12-15T06:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.061569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.061609 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.061621 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.061638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.061650 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.164362 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.164417 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.164432 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.164451 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.164463 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.267097 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.267186 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.267200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.267218 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.267231 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.370480 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.370540 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.370555 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.370577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.370591 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.473380 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.473469 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.473496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.473526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.473547 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.577096 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.577185 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.577197 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.577217 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.577229 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.680719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.680770 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.680783 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.680804 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.680817 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.784052 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.784095 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.784120 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.784137 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.784149 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.887342 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.887446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.887477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.887510 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.887534 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.992767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.993305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.993327 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.993356 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:41 crc kubenswrapper[4876]: I1215 06:52:41.993375 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:41Z","lastTransitionTime":"2025-12-15T06:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.096720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.097062 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.097214 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.097323 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.097397 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.199963 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.200027 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.200040 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.200059 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.200071 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.303291 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.303346 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.303361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.303386 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.303402 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.406472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.406530 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.406543 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.406570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.406589 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.509477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.509534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.509547 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.509569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.509582 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.612616 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.612663 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.612678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.612698 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.612713 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.705037 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.705092 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.705403 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.705488 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:42 crc kubenswrapper[4876]: E1215 06:52:42.705493 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:42 crc kubenswrapper[4876]: E1215 06:52:42.705818 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:42 crc kubenswrapper[4876]: E1215 06:52:42.706122 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:42 crc kubenswrapper[4876]: E1215 06:52:42.706220 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.715815 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.715857 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.715870 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.715888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.715904 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.819002 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.819069 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.819094 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.819162 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.819186 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.921235 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.921298 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.921313 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.921340 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:42 crc kubenswrapper[4876]: I1215 06:52:42.921360 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:42Z","lastTransitionTime":"2025-12-15T06:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.024526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.024593 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.024609 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.024637 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.024657 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.126893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.126928 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.126936 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.126951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.126959 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.229633 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.229693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.229708 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.229731 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.229744 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.333696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.333746 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.333755 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.333772 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.333784 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.436764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.436808 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.436821 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.436839 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.436852 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.539473 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.539520 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.539532 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.539549 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.539561 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.642613 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.642679 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.642702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.642730 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.642751 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.745812 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.745861 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.745880 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.745909 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.745934 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.848587 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.848628 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.848639 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.848656 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.848667 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.952242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.952293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.952306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.952326 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:43 crc kubenswrapper[4876]: I1215 06:52:43.952338 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:43Z","lastTransitionTime":"2025-12-15T06:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.054690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.054755 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.054776 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.054805 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.054824 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.157242 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.157308 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.157334 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.157365 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.157395 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.260820 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.260901 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.260930 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.260959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.260980 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.363899 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.363970 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.363990 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.365193 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.365266 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.469277 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.469343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.469361 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.469385 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.469399 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.572347 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.572407 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.572424 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.572448 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.572468 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.676267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.676311 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.676327 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.676348 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.676364 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.705460 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.705648 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.705749 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.706034 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:44 crc kubenswrapper[4876]: E1215 06:52:44.706181 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:44 crc kubenswrapper[4876]: E1215 06:52:44.706295 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:44 crc kubenswrapper[4876]: E1215 06:52:44.706417 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:44 crc kubenswrapper[4876]: E1215 06:52:44.706519 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.727683 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-ddcwq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:25Z\\\",\\\"message\\\":\\\"2025-12-15T06:51:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d\\\\n2025-12-15T06:51:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_14d55dc1-2b66-4ea3-b986-9aa152dc7f0d to /host/opt/cni/bin/\\\\n2025-12-15T06:51:40Z [verbose] multus-daemon started\\\\n2025-12-15T06:51:40Z [verbose] Readiness Indicator file check\\\\n2025-12-15T06:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tgztf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-ddcwq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.744251 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nkxj7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4405bff2-7918-48ba-97b3-81079e042256\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2eab34060cdbdb479b4f98ec3e38b6482c69fe6d3ba09e1ae0dbc370041ba6b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4fkrr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nkxj7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.772230 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-15T06:52:36Z\\\",\\\"message\\\":\\\"ary/ingress-canary for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1215 06:52:36.735752 6980 obj_retry.go:365] Adding new object: *v1.Pod openshift-machine-config-operator/machine-config-daemon-zdprc\\\\nI1215 06:52:36.736445 6980 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-zdprc in node crc\\\\nI1215 06:52:36.736419 6980 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:52:35Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zzj5w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-wm92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.779836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.780184 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.780390 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.780561 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.780704 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.794408 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40cfea4f-327e-4198-a6ea-b4382d20ba28\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06fbdfda734eefa4ad7e28bf6435d234bdde66e8a546821af83d37248d85d758\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://814ed9f8435ed8fedab2ef3339b1ef9158fb130db9ad8db2fc1f5ccbe76a6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2tlt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-5kzkz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.811038 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rzth5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daee20e1-a017-4464-9626-ea2c52cfae57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:53Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mkcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:53Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rzth5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc 
kubenswrapper[4876]: I1215 06:52:44.841939 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ae3048-5e11-4c20-91eb-675bc47919d4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://896353cf532c03a1d4d6cca0872f9aeae0b6cf4ee9a71c4469b6dad58baedf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc124424ae10e2f9cabfd2efaeb0e4edbeb3ff3591a94956af0a1aa83d1702ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c4c16266b40cd2bb5922f57e11eb82c496a21921b04977a60fa0ce86201c7bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9df1f2745da78ee20938ecd460b394b9e269e936d63e721643238e61582881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f59f1925f7e309e533406fa66abde057e64d501a15405264de47d1710f1d97f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7c5f2a85a2950bc2346a5ff3dded1671bcf54004cf062f7e07d359e14e0af8a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a55c36f6245f838900b52ebd0150b007542dc297c3c1a772a22a0e8f6a4c55e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1f378b8cea0977aa4116365292cf2e48bf41473d4f261d6ed8d336e8259025d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.861128 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f3a728ba-8b04-4c1d-8c0a-0518c330f7b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1215 06:51:27.247457 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1215 06:51:27.249090 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3035428886/tls.crt::/tmp/serving-cert-3035428886/tls.key\\\\\\\"\\\\nI1215 06:51:32.715762 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1215 06:51:32.721616 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1215 06:51:32.721686 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1215 06:51:32.721739 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1215 06:51:32.721750 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1215 06:51:32.735009 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1215 06:51:32.735185 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1215 06:51:32.735258 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735268 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1215 06:51:32.735274 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1215 06:51:32.735282 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1215 06:51:32.735287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1215 06:51:32.735292 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1215 06:51:32.738668 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.879648 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e94d08df-d7fb-41a2-a694-6c23b7415851\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b6c4b40ab707ff05c648ee359d923296f9f6a1b25cae7e357db51294c49f73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dbd063c8c1887e2b592e9bf216bcc78ec6ab222d9f57b602c1b7658b487f9594\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e658d9ec8dee7d3f73d4a4f7ec047427901423211c8564d386be8b78e06bd915\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.883560 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.883643 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.883669 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.883703 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.883727 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.896222 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.911097 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cae7220-f622-42b3-ac60-786f6d8ebc80\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca5ddaeab5b3b024b7ee9421dd0f43da0510a4d13d9244271457183a3518cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1aa4f9910ee14016b479132218c78df7d93d343a88f44b272dfaadce8f6ec873\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bac63bff648d7d7c118491daf36751e0f49854af601fa92b29d8559c57ad7410\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://59b8bfc4e0cc47973188e618c6f2421db2f794f0e7695a65b61a81836d89acfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.925701 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.942332 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-47dbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9f253db4-4bae-4bc6-ae0d-a15c6a19ab86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97f87ca77fe87b0b5c70c43a39341f6c39feee76d15cd85fb027d78f7209d08c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://866e7fcc33be834cfb590812d930126adadc4a769999f26dfa6136be4861dbe1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60d4154ea8dbcced09931328a34ac9737cfdd2cabd66f33e2a62157f5d2c6063\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94a242af7f79bd4f900806eff8ad1098488eea9484a5ed56f0e9c4f53715a8b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3dd148806fc6e27ea583915f3122420a029ec29c69207d23f5ad78f30a4586e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26cf6794660948fb1cb40b23f9b1d2687a96c8af4e8f419126fc2e050799f22d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a82d2d55ce7d09a65e898a787d50ff57446bde8664cb29a38edb8f63f0d300c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kt2gt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-47dbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.957811 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f9e29c3a-f186-4bb8-af46-82cea3a16508\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://860ced33f891a28d04ddc5d8e80f6ca480397ce9274784c7b4d2c67d7b37d908\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/r
ootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2qgc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-zdprc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.977500 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcc0ba4a058ec383d6137652fd728613e66256b83b9e4242fd7e4789013b5595\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.986888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.986959 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:44 crc 
kubenswrapper[4876]: I1215 06:52:44.986984 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.987017 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.987045 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:44Z","lastTransitionTime":"2025-12-15T06:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:44 crc kubenswrapper[4876]: I1215 06:52:44.995434 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vlwzk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"410e3d06-6cd9-4b3a-812f-f07217c30488\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e5ce348158c35354cdead7224cdec97edcc7d3593dfc088fa0d176f021a4077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6jjs6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vlwzk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:44Z is after 2025-08-24T17:21:41Z" Dec 
15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.013440 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed406b28-937f-4a6a-b39d-a876d1c75d4a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71dcd5a6f63f179cddda02c3eaf4a306a0d65724af1d2263ac11adf05c63b73d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc165ce44894dab9da041af3c5ccaaddbbb3f6143b69cc11c1fc5d2b77f2ef6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-15T06:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-15T06:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-15T06:51:14Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.034263 4876 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.057191 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://741f21e3353cf5a5c8392a8bcc003c6ee374b8ee02ae9dc539aa509d13371530\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.077166 4876 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-15T06:51:33Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5bf691aac1551d641ca60ae2ca696519e5562014a4715a04c02742e80ecce32a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a9cd65af37b38fcf37448ead77b47a197c831c03fe26d93bdd5044e99412d67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-15T06:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.090835 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.090893 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.090905 4876 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.090922 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.090935 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.165213 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.165284 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.165309 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.165341 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.165367 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.186554 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.196679 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.196782 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.196814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.196849 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.196874 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.216765 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.223469 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.223526 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.223547 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.223567 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.223584 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.238935 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.243778 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.243810 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.243820 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.243837 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.243849 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.264479 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.269255 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.269305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.269319 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.269337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.269350 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.288961 4876 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-15T06:52:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"9e406852-849d-435c-ab8a-94b3d3d795a3\\\",\\\"systemUUID\\\":\\\"a6c10a00-26cd-4256-831a-0419287771d9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-15T06:52:45Z is after 2025-08-24T17:21:41Z" Dec 15 06:52:45 crc kubenswrapper[4876]: E1215 06:52:45.289079 4876 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.291035 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.291081 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.291097 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.291133 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.291156 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.394062 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.394189 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.394210 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.394236 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.394254 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.497769 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.497832 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.497854 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.497882 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.497905 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.602078 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.602168 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.602187 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.602211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.602229 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.705324 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.705366 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.705378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.705396 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.705406 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.808829 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.808876 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.808888 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.808905 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.808916 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.911509 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.911559 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.911568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.911586 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:45 crc kubenswrapper[4876]: I1215 06:52:45.911598 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:45Z","lastTransitionTime":"2025-12-15T06:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.015314 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.015364 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.015380 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.015410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.015426 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.119263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.119331 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.119342 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.119362 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.119385 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.223018 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.223071 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.223089 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.223178 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.223198 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.326307 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.326363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.326381 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.326405 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.326421 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.430136 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.430202 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.430220 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.430251 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.430268 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.534319 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.534389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.534406 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.534432 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.534452 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.637477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.637565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.637586 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.637613 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.637630 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.704842 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.704906 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.704919 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.705063 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:46 crc kubenswrapper[4876]: E1215 06:52:46.705065 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:46 crc kubenswrapper[4876]: E1215 06:52:46.705290 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:46 crc kubenswrapper[4876]: E1215 06:52:46.705388 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:46 crc kubenswrapper[4876]: E1215 06:52:46.705488 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.740658 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.740743 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.740765 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.740797 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.740819 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.845115 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.845183 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.845204 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.845227 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.845244 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.948844 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.949427 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.949702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.949917 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:46 crc kubenswrapper[4876]: I1215 06:52:46.950095 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:46Z","lastTransitionTime":"2025-12-15T06:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.055450 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.055524 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.055552 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.055579 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.055595 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.158874 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.159287 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.159387 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.159464 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.159533 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.263355 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.263411 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.263424 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.263444 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.263460 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.366194 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.366250 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.366265 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.366285 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.366299 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.469425 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.469720 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.469826 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.469949 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.470035 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.572821 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.572884 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.572900 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.572926 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.572942 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.676074 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.676189 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.676208 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.676270 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.676290 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.779719 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.779814 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.779834 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.779863 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.779886 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.883828 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.883881 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.883899 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.883928 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.883940 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.986819 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.986871 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.986881 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.986901 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:47 crc kubenswrapper[4876]: I1215 06:52:47.986913 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:47Z","lastTransitionTime":"2025-12-15T06:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.090489 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.090641 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.090668 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.090701 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.090729 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.194689 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.194742 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.194751 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.194770 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.194784 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.297364 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.297420 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.297429 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.297446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.297463 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.400501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.400554 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.400565 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.400585 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.400598 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.504030 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.504124 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.504150 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.504182 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.504200 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.607448 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.607493 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.607505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.607525 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.607542 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.704824 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.705012 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:48 crc kubenswrapper[4876]: E1215 06:52:48.705137 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.705174 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.705233 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:48 crc kubenswrapper[4876]: E1215 06:52:48.705642 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.705791 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:52:48 crc kubenswrapper[4876]: E1215 06:52:48.705887 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:48 crc kubenswrapper[4876]: E1215 06:52:48.705918 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:52:48 crc kubenswrapper[4876]: E1215 06:52:48.705983 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.708983 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.709022 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.709034 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.709048 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.709059 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.811684 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.811724 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.811735 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.811750 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.811760 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.915292 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.915363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.915382 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.915409 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:48 crc kubenswrapper[4876]: I1215 06:52:48.915429 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:48Z","lastTransitionTime":"2025-12-15T06:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.018312 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.018343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.018368 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.018382 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.018391 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.121214 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.121282 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.121305 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.121334 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.121355 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.224596 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.224677 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.224693 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.224710 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.224722 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.327490 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.327538 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.327552 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.327569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.327580 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.435465 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.435584 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.435601 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.435634 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.435651 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.538898 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.538964 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.538983 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.539009 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.539028 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.642923 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.642977 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.642988 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.643004 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.643013 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.745872 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.745923 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.745933 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.745953 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.745964 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.849521 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.849584 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.849603 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.849627 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.849643 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.952606 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.952664 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.952678 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.952702 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:49 crc kubenswrapper[4876]: I1215 06:52:49.952716 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:49Z","lastTransitionTime":"2025-12-15T06:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.056055 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.056127 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.056139 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.056158 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.056170 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.159666 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.159717 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.159731 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.159753 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.159766 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.263279 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.263345 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.263363 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.263390 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.263409 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.367002 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.367049 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.367058 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.367079 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.367091 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.470589 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.470646 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.470661 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.470683 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.470696 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.574335 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.574394 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.574411 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.574436 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.574451 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.678429 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.678478 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.678491 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.678513 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.678526 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.705411 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.705508 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.705411 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:50 crc kubenswrapper[4876]: E1215 06:52:50.705718 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:50 crc kubenswrapper[4876]: E1215 06:52:50.705813 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:50 crc kubenswrapper[4876]: E1215 06:52:50.705950 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.706210 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:50 crc kubenswrapper[4876]: E1215 06:52:50.706371 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.781416 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.781471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.781481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.781498 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.781508 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.885723 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.885785 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.885798 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.885820 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.885830 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.989567 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.989629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.989643 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.989665 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:50 crc kubenswrapper[4876]: I1215 06:52:50.989677 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:50Z","lastTransitionTime":"2025-12-15T06:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.092941 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.092999 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.093010 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.093030 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.093043 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.195767 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.195822 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.195836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.195852 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.195867 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.298629 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.298703 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.298726 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.298756 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.298778 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.402445 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.402505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.402522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.402548 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.402568 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.506012 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.506085 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.506137 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.506161 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.506178 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.609386 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.609460 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.609471 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.609496 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.609507 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.713395 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.713435 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.713445 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.713463 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.713472 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.816848 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.817708 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.817740 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.817768 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.817786 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.921400 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.921467 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.921478 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.921511 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:51 crc kubenswrapper[4876]: I1215 06:52:51.921524 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:51Z","lastTransitionTime":"2025-12-15T06:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.024053 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.024088 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.024097 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.024126 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.024135 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.127170 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.127258 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.127281 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.127314 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.127336 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.230352 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.230427 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.230452 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.230482 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.230503 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.333356 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.333436 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.333473 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.333501 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.333522 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.435763 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.435819 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.435836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.435858 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.435875 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.539191 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.539232 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.539245 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.539265 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.539277 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.642597 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.642662 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.642680 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.642705 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.642722 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.705432 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.705649 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:52 crc kubenswrapper[4876]: E1215 06:52:52.705663 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:52 crc kubenswrapper[4876]: E1215 06:52:52.705893 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.705967 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:52 crc kubenswrapper[4876]: E1215 06:52:52.706188 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.706248 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:52 crc kubenswrapper[4876]: E1215 06:52:52.706410 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.746255 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.746319 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.746343 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.746371 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.746397 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.850843 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.850912 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.850930 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.850953 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.850973 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.954087 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.954195 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.954265 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.954296 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:52 crc kubenswrapper[4876]: I1215 06:52:52.954320 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:52Z","lastTransitionTime":"2025-12-15T06:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.057696 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.057786 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.057803 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.057823 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.057840 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.161200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.161293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.161325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.161355 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.161377 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.265479 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.265550 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.265569 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.265599 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.265619 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.368458 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.368522 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.368541 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.368566 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.368588 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.471169 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.471240 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.471264 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.471296 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.471320 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.574529 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.574604 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.574625 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.574655 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.574672 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.677951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.678057 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.678079 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.678127 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.678149 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.781386 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.781454 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.781472 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.781497 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.781515 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.884690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.884782 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.884805 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.884836 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.884860 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.988219 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.988267 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.988282 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.988306 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:53 crc kubenswrapper[4876]: I1215 06:52:53.988327 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:53Z","lastTransitionTime":"2025-12-15T06:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.091070 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.091155 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.091175 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.091200 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.091217 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.193327 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.193408 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.193423 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.193446 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.193461 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.296239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.296293 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.296308 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.296325 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.296337 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.399886 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.399951 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.399966 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.399985 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.400004 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.503657 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.503718 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.503738 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.503764 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.503782 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.607470 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.607534 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.607547 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.607568 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.607584 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.704895 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.704949 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.704894 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:54 crc kubenswrapper[4876]: E1215 06:52:54.705152 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.705176 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:54 crc kubenswrapper[4876]: E1215 06:52:54.705297 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:54 crc kubenswrapper[4876]: E1215 06:52:54.705356 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:54 crc kubenswrapper[4876]: E1215 06:52:54.705389 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.711438 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.711510 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.711558 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.711603 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.711627 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.730549 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=23.73052199 podStartE2EDuration="23.73052199s" podCreationTimestamp="2025-12-15 06:52:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.729527463 +0000 UTC m=+100.300670414" watchObservedRunningTime="2025-12-15 06:52:54.73052199 +0000 UTC m=+100.301664941" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.814505 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.814537 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.814546 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.814577 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.814587 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.838626 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vlwzk" podStartSLOduration=76.838610566 podStartE2EDuration="1m16.838610566s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.838294887 +0000 UTC m=+100.409437798" watchObservedRunningTime="2025-12-15 06:52:54.838610566 +0000 UTC m=+100.409753477" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.853059 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-ddcwq" podStartSLOduration=76.853025905 podStartE2EDuration="1m16.853025905s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.852588914 +0000 UTC m=+100.423731835" watchObservedRunningTime="2025-12-15 06:52:54.853025905 +0000 UTC m=+100.424168876" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.869081 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-5kzkz" podStartSLOduration=75.869063899 podStartE2EDuration="1m15.869063899s" podCreationTimestamp="2025-12-15 06:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.867762733 +0000 UTC m=+100.438905644" watchObservedRunningTime="2025-12-15 06:52:54.869063899 +0000 UTC m=+100.440206810" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.915073 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=81.915052724 podStartE2EDuration="1m21.915052724s" podCreationTimestamp="2025-12-15 06:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.913917232 +0000 UTC m=+100.485060143" watchObservedRunningTime="2025-12-15 06:52:54.915052724 +0000 UTC m=+100.486195635" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.917337 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.917378 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.917389 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.917410 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.917422 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:54Z","lastTransitionTime":"2025-12-15T06:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.937321 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=82.93729396 podStartE2EDuration="1m22.93729396s" podCreationTimestamp="2025-12-15 06:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.935949823 +0000 UTC m=+100.507092744" watchObservedRunningTime="2025-12-15 06:52:54.93729396 +0000 UTC m=+100.508436891" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.953040 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=80.953012926 podStartE2EDuration="1m20.953012926s" podCreationTimestamp="2025-12-15 06:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.952155093 +0000 UTC m=+100.523298024" watchObservedRunningTime="2025-12-15 06:52:54.953012926 +0000 UTC m=+100.524155857" Dec 15 06:52:54 crc kubenswrapper[4876]: I1215 06:52:54.981572 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-nkxj7" podStartSLOduration=76.981547767 podStartE2EDuration="1m16.981547767s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:54.981166846 +0000 UTC m=+100.552309757" watchObservedRunningTime="2025-12-15 06:52:54.981547767 +0000 UTC m=+100.552690678" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.021570 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.021609 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.021621 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.021638 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.021651 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.033358 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=55.033335802 podStartE2EDuration="55.033335802s" podCreationTimestamp="2025-12-15 06:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:55.033169158 +0000 UTC m=+100.604312089" watchObservedRunningTime="2025-12-15 06:52:55.033335802 +0000 UTC m=+100.604478723" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.064793 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-47dbh" podStartSLOduration=77.064773663 podStartE2EDuration="1m17.064773663s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:55.064342841 +0000 UTC m=+100.635485762" watchObservedRunningTime="2025-12-15 06:52:55.064773663 +0000 UTC m=+100.635916574" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.124728 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.124807 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.124827 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.124864 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.124882 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.228263 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.228698 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.228715 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.228739 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.228756 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.331613 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.331672 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.331690 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.331714 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.331734 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.435147 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.435211 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.435229 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.435254 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.435273 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.538477 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.538544 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.538562 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.538588 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.538607 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.642399 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.642468 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.642481 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.642511 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.642525 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.691076 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.691199 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.691214 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.691239 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.691255 4876 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-15T06:52:55Z","lastTransitionTime":"2025-12-15T06:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.752185 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podStartSLOduration=77.75214966 podStartE2EDuration="1m17.75214966s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:55.081613789 +0000 UTC m=+100.652756730" watchObservedRunningTime="2025-12-15 06:52:55.75214966 +0000 UTC m=+101.323292621" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.754768 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8"] Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.755508 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.761745 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.762346 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.762528 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.762540 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.874328 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3027bf3c-594c-49f8-90ee-c7c44df579e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.874647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3027bf3c-594c-49f8-90ee-c7c44df579e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.874808 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.874925 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.875134 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3027bf3c-594c-49f8-90ee-c7c44df579e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.976762 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3027bf3c-594c-49f8-90ee-c7c44df579e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc 
kubenswrapper[4876]: I1215 06:52:55.976874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3027bf3c-594c-49f8-90ee-c7c44df579e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.976947 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.977020 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.977054 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3027bf3c-594c-49f8-90ee-c7c44df579e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.977964 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.978139 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3027bf3c-594c-49f8-90ee-c7c44df579e3-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.979245 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3027bf3c-594c-49f8-90ee-c7c44df579e3-service-ca\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:55 crc kubenswrapper[4876]: I1215 06:52:55.986835 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3027bf3c-594c-49f8-90ee-c7c44df579e3-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.000343 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3027bf3c-594c-49f8-90ee-c7c44df579e3-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-g95v8\" (UID: \"3027bf3c-594c-49f8-90ee-c7c44df579e3\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.082521 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.326458 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" event={"ID":"3027bf3c-594c-49f8-90ee-c7c44df579e3","Type":"ContainerStarted","Data":"1f3d4bf30225aaab8c0ab78abdf4c56ea7f4605b85197eab3c23bec77ae95bb2"} Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.326546 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" event={"ID":"3027bf3c-594c-49f8-90ee-c7c44df579e3","Type":"ContainerStarted","Data":"b41aa188e1d263b9d6e0e8e236a3663292976fced456a757dac0d2109f6027fc"} Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.705318 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:56 crc kubenswrapper[4876]: E1215 06:52:56.705530 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.705605 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.705834 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:56 crc kubenswrapper[4876]: I1215 06:52:56.706158 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:56 crc kubenswrapper[4876]: E1215 06:52:56.706363 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:52:56 crc kubenswrapper[4876]: E1215 06:52:56.706165 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:56 crc kubenswrapper[4876]: E1215 06:52:56.706606 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:57 crc kubenswrapper[4876]: I1215 06:52:57.090481 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:57 crc kubenswrapper[4876]: E1215 06:52:57.090764 4876 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:57 crc kubenswrapper[4876]: E1215 06:52:57.090872 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs podName:daee20e1-a017-4464-9626-ea2c52cfae57 nodeName:}" failed. No retries permitted until 2025-12-15 06:54:01.090842489 +0000 UTC m=+166.661985440 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs") pod "network-metrics-daemon-rzth5" (UID: "daee20e1-a017-4464-9626-ea2c52cfae57") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 15 06:52:58 crc kubenswrapper[4876]: I1215 06:52:58.705036 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:52:58 crc kubenswrapper[4876]: I1215 06:52:58.705218 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:52:58 crc kubenswrapper[4876]: E1215 06:52:58.705300 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:52:58 crc kubenswrapper[4876]: I1215 06:52:58.705074 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:52:58 crc kubenswrapper[4876]: E1215 06:52:58.705444 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:52:58 crc kubenswrapper[4876]: E1215 06:52:58.705642 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:52:58 crc kubenswrapper[4876]: I1215 06:52:58.705685 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:52:58 crc kubenswrapper[4876]: E1215 06:52:58.705903 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:00 crc kubenswrapper[4876]: I1215 06:53:00.704916 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:00 crc kubenswrapper[4876]: I1215 06:53:00.704920 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:00 crc kubenswrapper[4876]: I1215 06:53:00.705147 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:00 crc kubenswrapper[4876]: E1215 06:53:00.705197 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:00 crc kubenswrapper[4876]: I1215 06:53:00.705166 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:00 crc kubenswrapper[4876]: E1215 06:53:00.705293 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:00 crc kubenswrapper[4876]: E1215 06:53:00.705438 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:00 crc kubenswrapper[4876]: E1215 06:53:00.705774 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:02 crc kubenswrapper[4876]: I1215 06:53:02.705382 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:02 crc kubenswrapper[4876]: I1215 06:53:02.705447 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:02 crc kubenswrapper[4876]: E1215 06:53:02.705608 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:02 crc kubenswrapper[4876]: I1215 06:53:02.705976 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:02 crc kubenswrapper[4876]: E1215 06:53:02.706157 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:02 crc kubenswrapper[4876]: I1215 06:53:02.706228 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:02 crc kubenswrapper[4876]: E1215 06:53:02.706422 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:02 crc kubenswrapper[4876]: E1215 06:53:02.706520 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:03 crc kubenswrapper[4876]: I1215 06:53:03.705850 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:53:03 crc kubenswrapper[4876]: E1215 06:53:03.706353 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:53:04 crc kubenswrapper[4876]: I1215 06:53:04.705609 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:04 crc kubenswrapper[4876]: I1215 06:53:04.705676 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:04 crc kubenswrapper[4876]: I1215 06:53:04.705693 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:04 crc kubenswrapper[4876]: I1215 06:53:04.705705 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:04 crc kubenswrapper[4876]: E1215 06:53:04.707735 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:04 crc kubenswrapper[4876]: E1215 06:53:04.708288 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:04 crc kubenswrapper[4876]: E1215 06:53:04.708397 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:04 crc kubenswrapper[4876]: E1215 06:53:04.708593 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:06 crc kubenswrapper[4876]: I1215 06:53:06.705362 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:06 crc kubenswrapper[4876]: I1215 06:53:06.705465 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:06 crc kubenswrapper[4876]: I1215 06:53:06.705362 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:06 crc kubenswrapper[4876]: E1215 06:53:06.705498 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:06 crc kubenswrapper[4876]: I1215 06:53:06.705561 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:06 crc kubenswrapper[4876]: E1215 06:53:06.705715 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:06 crc kubenswrapper[4876]: E1215 06:53:06.705764 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:06 crc kubenswrapper[4876]: E1215 06:53:06.705829 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:08 crc kubenswrapper[4876]: I1215 06:53:08.704600 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:08 crc kubenswrapper[4876]: I1215 06:53:08.704733 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:08 crc kubenswrapper[4876]: I1215 06:53:08.704764 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:08 crc kubenswrapper[4876]: I1215 06:53:08.704816 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:08 crc kubenswrapper[4876]: E1215 06:53:08.705559 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:08 crc kubenswrapper[4876]: E1215 06:53:08.705617 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:08 crc kubenswrapper[4876]: E1215 06:53:08.705760 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:08 crc kubenswrapper[4876]: E1215 06:53:08.705882 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:10 crc kubenswrapper[4876]: I1215 06:53:10.705437 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:10 crc kubenswrapper[4876]: I1215 06:53:10.705578 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:10 crc kubenswrapper[4876]: E1215 06:53:10.705916 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:10 crc kubenswrapper[4876]: I1215 06:53:10.705696 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:10 crc kubenswrapper[4876]: E1215 06:53:10.706025 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:10 crc kubenswrapper[4876]: I1215 06:53:10.705593 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:10 crc kubenswrapper[4876]: E1215 06:53:10.706212 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:10 crc kubenswrapper[4876]: E1215 06:53:10.706374 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.387647 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/1.log" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.388560 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/0.log" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.388628 4876 generic.go:334] "Generic (PLEG): container finished" podID="d2c0440d-a8eb-4f51-8626-c3bb9d1b0867" containerID="112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008" exitCode=1 Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.388682 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerDied","Data":"112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008"} Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.388742 4876 scope.go:117] "RemoveContainer" containerID="522717f11a1e91eeacc5e0a4fb05427f8deaf9d85674682a6b89219ba89cd562" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.389370 4876 scope.go:117] "RemoveContainer" containerID="112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008" Dec 15 06:53:12 crc kubenswrapper[4876]: E1215 06:53:12.389747 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-ddcwq_openshift-multus(d2c0440d-a8eb-4f51-8626-c3bb9d1b0867)\"" pod="openshift-multus/multus-ddcwq" podUID="d2c0440d-a8eb-4f51-8626-c3bb9d1b0867" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.417253 4876 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-g95v8" podStartSLOduration=94.417228373 podStartE2EDuration="1m34.417228373s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:52:56.341768701 +0000 UTC m=+101.912911612" watchObservedRunningTime="2025-12-15 06:53:12.417228373 +0000 UTC m=+117.988371354" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.704512 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.704557 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.704625 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:12 crc kubenswrapper[4876]: E1215 06:53:12.704779 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:12 crc kubenswrapper[4876]: I1215 06:53:12.704890 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:12 crc kubenswrapper[4876]: E1215 06:53:12.704941 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:12 crc kubenswrapper[4876]: E1215 06:53:12.705095 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:12 crc kubenswrapper[4876]: E1215 06:53:12.705335 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:13 crc kubenswrapper[4876]: I1215 06:53:13.395420 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/1.log" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.663654 4876 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 15 06:53:14 crc kubenswrapper[4876]: I1215 06:53:14.705330 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:14 crc kubenswrapper[4876]: I1215 06:53:14.705459 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:14 crc kubenswrapper[4876]: I1215 06:53:14.705820 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.707254 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.707481 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.707631 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:14 crc kubenswrapper[4876]: I1215 06:53:14.707819 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.708136 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:14 crc kubenswrapper[4876]: E1215 06:53:14.789370 4876 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 15 06:53:16 crc kubenswrapper[4876]: I1215 06:53:16.704982 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:16 crc kubenswrapper[4876]: I1215 06:53:16.705036 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:16 crc kubenswrapper[4876]: I1215 06:53:16.704983 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:16 crc kubenswrapper[4876]: E1215 06:53:16.705174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:16 crc kubenswrapper[4876]: E1215 06:53:16.705274 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:16 crc kubenswrapper[4876]: I1215 06:53:16.705330 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:16 crc kubenswrapper[4876]: E1215 06:53:16.705434 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:16 crc kubenswrapper[4876]: E1215 06:53:16.706059 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:16 crc kubenswrapper[4876]: I1215 06:53:16.706709 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:53:16 crc kubenswrapper[4876]: E1215 06:53:16.706982 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-wm92c_openshift-ovn-kubernetes(2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" Dec 15 06:53:18 crc kubenswrapper[4876]: I1215 06:53:18.704831 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:18 crc kubenswrapper[4876]: I1215 06:53:18.704893 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:18 crc kubenswrapper[4876]: E1215 06:53:18.704987 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:18 crc kubenswrapper[4876]: I1215 06:53:18.705052 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:18 crc kubenswrapper[4876]: E1215 06:53:18.705341 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:18 crc kubenswrapper[4876]: E1215 06:53:18.705478 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:18 crc kubenswrapper[4876]: I1215 06:53:18.705594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:18 crc kubenswrapper[4876]: E1215 06:53:18.705839 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:19 crc kubenswrapper[4876]: E1215 06:53:19.790511 4876 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 06:53:20 crc kubenswrapper[4876]: I1215 06:53:20.705544 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:20 crc kubenswrapper[4876]: I1215 06:53:20.705774 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:20 crc kubenswrapper[4876]: E1215 06:53:20.706033 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:20 crc kubenswrapper[4876]: I1215 06:53:20.706238 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:20 crc kubenswrapper[4876]: I1215 06:53:20.706268 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:20 crc kubenswrapper[4876]: E1215 06:53:20.706424 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:20 crc kubenswrapper[4876]: E1215 06:53:20.706576 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:20 crc kubenswrapper[4876]: E1215 06:53:20.706712 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:22 crc kubenswrapper[4876]: I1215 06:53:22.704560 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:22 crc kubenswrapper[4876]: I1215 06:53:22.704594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:22 crc kubenswrapper[4876]: E1215 06:53:22.704801 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:22 crc kubenswrapper[4876]: I1215 06:53:22.704905 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:22 crc kubenswrapper[4876]: I1215 06:53:22.704930 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:22 crc kubenswrapper[4876]: E1215 06:53:22.704985 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:22 crc kubenswrapper[4876]: E1215 06:53:22.705280 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:22 crc kubenswrapper[4876]: E1215 06:53:22.705516 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:24 crc kubenswrapper[4876]: I1215 06:53:24.705368 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:24 crc kubenswrapper[4876]: E1215 06:53:24.707904 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:24 crc kubenswrapper[4876]: I1215 06:53:24.707958 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:24 crc kubenswrapper[4876]: I1215 06:53:24.708025 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:24 crc kubenswrapper[4876]: E1215 06:53:24.708245 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:24 crc kubenswrapper[4876]: E1215 06:53:24.708472 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:24 crc kubenswrapper[4876]: I1215 06:53:24.708048 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:24 crc kubenswrapper[4876]: E1215 06:53:24.708682 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:24 crc kubenswrapper[4876]: E1215 06:53:24.792069 4876 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 06:53:26 crc kubenswrapper[4876]: I1215 06:53:26.705064 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:26 crc kubenswrapper[4876]: E1215 06:53:26.705297 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:26 crc kubenswrapper[4876]: I1215 06:53:26.705465 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:26 crc kubenswrapper[4876]: I1215 06:53:26.705583 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:26 crc kubenswrapper[4876]: I1215 06:53:26.705696 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:26 crc kubenswrapper[4876]: E1215 06:53:26.705766 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:26 crc kubenswrapper[4876]: I1215 06:53:26.706096 4876 scope.go:117] "RemoveContainer" containerID="112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008" Dec 15 06:53:26 crc kubenswrapper[4876]: E1215 06:53:26.706252 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:26 crc kubenswrapper[4876]: E1215 06:53:26.706341 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:27 crc kubenswrapper[4876]: I1215 06:53:27.447651 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/1.log" Dec 15 06:53:27 crc kubenswrapper[4876]: I1215 06:53:27.447997 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerStarted","Data":"1cb7b04b03ffa94f95360a75b866a80620d08fc9d3cff5391fa24f7a0627d3be"} Dec 15 06:53:28 crc kubenswrapper[4876]: I1215 06:53:28.705390 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:28 crc kubenswrapper[4876]: I1215 06:53:28.705422 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:28 crc kubenswrapper[4876]: E1215 06:53:28.705534 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:28 crc kubenswrapper[4876]: I1215 06:53:28.705386 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:28 crc kubenswrapper[4876]: E1215 06:53:28.705668 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:28 crc kubenswrapper[4876]: E1215 06:53:28.705811 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:28 crc kubenswrapper[4876]: I1215 06:53:28.705422 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:28 crc kubenswrapper[4876]: E1215 06:53:28.706580 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:29 crc kubenswrapper[4876]: I1215 06:53:29.706325 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 06:53:29 crc kubenswrapper[4876]: E1215 06:53:29.792594 4876 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.460962 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/3.log" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.463904 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rzth5"] Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.464007 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:30 crc kubenswrapper[4876]: E1215 06:53:30.464137 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.464634 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerStarted","Data":"d114696e400065dcb1a37c4ed1dd3612ec78a17c0ddc386bd9405341a22ba343"} Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.465278 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.507780 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podStartSLOduration=112.50775189 podStartE2EDuration="1m52.50775189s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:30.506805373 +0000 UTC m=+136.077948304" watchObservedRunningTime="2025-12-15 06:53:30.50775189 +0000 UTC m=+136.078894811" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.705457 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.705474 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:30 crc kubenswrapper[4876]: I1215 06:53:30.705566 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:30 crc kubenswrapper[4876]: E1215 06:53:30.705704 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:30 crc kubenswrapper[4876]: E1215 06:53:30.705804 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:30 crc kubenswrapper[4876]: E1215 06:53:30.705931 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:31 crc kubenswrapper[4876]: I1215 06:53:31.705271 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:31 crc kubenswrapper[4876]: E1215 06:53:31.705862 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:32 crc kubenswrapper[4876]: I1215 06:53:32.704878 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:32 crc kubenswrapper[4876]: I1215 06:53:32.704936 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:32 crc kubenswrapper[4876]: I1215 06:53:32.704959 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:32 crc kubenswrapper[4876]: E1215 06:53:32.705675 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:32 crc kubenswrapper[4876]: E1215 06:53:32.705819 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:32 crc kubenswrapper[4876]: E1215 06:53:32.706590 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:33 crc kubenswrapper[4876]: I1215 06:53:33.704731 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:33 crc kubenswrapper[4876]: E1215 06:53:33.704929 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rzth5" podUID="daee20e1-a017-4464-9626-ea2c52cfae57" Dec 15 06:53:34 crc kubenswrapper[4876]: I1215 06:53:34.704685 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:34 crc kubenswrapper[4876]: I1215 06:53:34.704713 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:34 crc kubenswrapper[4876]: I1215 06:53:34.704716 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:34 crc kubenswrapper[4876]: E1215 06:53:34.706972 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 15 06:53:34 crc kubenswrapper[4876]: E1215 06:53:34.707100 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 15 06:53:34 crc kubenswrapper[4876]: E1215 06:53:34.706862 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 15 06:53:35 crc kubenswrapper[4876]: I1215 06:53:35.705344 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:53:35 crc kubenswrapper[4876]: I1215 06:53:35.708159 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 15 06:53:35 crc kubenswrapper[4876]: I1215 06:53:35.710306 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.310917 4876 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.373965 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.376691 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.379253 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.379925 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.380213 4876 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.380257 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.380263 4876 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.380300 4876 reflector.go:561] object-"openshift-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.380320 4876 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.380324 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.380345 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.380355 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource 
\"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.380473 4876 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.380941 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.386664 4876 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.386719 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fztl2"] Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.386744 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.387083 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.387196 4876 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.387205 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.387241 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: W1215 06:53:36.387527 4876 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": 
failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object Dec 15 06:53:36 crc kubenswrapper[4876]: E1215 06:53:36.387599 4876 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.387670 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.391781 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.392704 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.399729 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.400751 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.400945 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.403206 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.403453 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.403515 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.404282 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.404503 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kqs9w"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.405504 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.407559 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.408462 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.408497 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.408967 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.409056 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.409301 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-dr96f"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.409792 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.410013 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.408984 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.410464 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.412584 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.413415 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.414052 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.414500 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.414769 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.414914 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.414974 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.415086 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.415145 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.415177 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.415255 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.415389 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.420257 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.420992 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.422254 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.422483 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.422672 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.423089 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.425179 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.425702 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.425821 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bkzhk"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.425910 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.426092 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.426237 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.426440 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.426631 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-48ggq"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.427037 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428034 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428184 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428299 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428464 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428584 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428768 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428896 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.428994 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.429094 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.429232 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.429316 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.432500 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.432801 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.435396 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.447593 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-f28h8"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.451303 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.453806 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-t872l"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.455411 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.455485 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.456349 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.463372 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.463637 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.465961 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-cqvrf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.466421 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.466546 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.471473 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q7skf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.472125 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.478879 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479034 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479167 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479700 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479734 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479857 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479903 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.479982 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480006 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 15 06:53:36 crc 
kubenswrapper[4876]: I1215 06:53:36.480059 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480218 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480237 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480375 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480429 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480483 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-p5nck"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480513 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480605 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480696 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.480783 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481029 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481035 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481498 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481620 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481653 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.481703 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.482413 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.483591 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.485681 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.488966 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.489933 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490375 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490454 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490528 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490601 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490727 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490811 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490844 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490855 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490875 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490882 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490930 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.490981 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491009 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491067 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491163 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491250 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491366 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491509 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491623 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491626 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491727 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.491786 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.492179 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.492381 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.493960 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.494099 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.495941 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.495843 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.524274 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.533774 4876 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.534775 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.535044 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.535473 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.535877 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.536243 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.536279 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.536642 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537000 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-znkl5"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537421 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537463 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537584 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537886 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.537958 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.538313 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.538358 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.543024 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.543948 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.551342 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fqp2q"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.552129 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.553715 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.554190 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.554561 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.554788 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.554935 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.556402 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.556563 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.556748 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.556998 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.557374 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.557990 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.558911 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fztl2"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.559006 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.559596 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.559724 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.560671 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.561612 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562023 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562060 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-serving-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562087 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562152 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562190 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562218 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-service-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562303 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562343 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-serving-cert\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562381 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562401 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562418 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562452 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-serving-cert\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562470 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7cwx\" (UniqueName: \"kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562486 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562502 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fbba828-5587-4284-b497-12d70cf1b44f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562546 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-x2jnz"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phfvg\" (UniqueName: \"kubernetes.io/projected/0e84c5be-bb73-42da-b000-529df9355a16-kube-api-access-phfvg\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562609 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562653 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r78gp\" (UniqueName: \"kubernetes.io/projected/c6defe56-040f-4e05-9b36-28b7d70481df-kube-api-access-r78gp\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562688 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562706 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8xrh\" (UniqueName: \"kubernetes.io/projected/5fbba828-5587-4284-b497-12d70cf1b44f-kube-api-access-s8xrh\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562729 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-audit\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562772 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562793 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lj8dx\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-kube-api-access-lj8dx\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fbba828-5587-4284-b497-12d70cf1b44f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562855 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-policies\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562870 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-audit-dir\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562888 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562923 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-image-import-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562947 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvhhp\" (UniqueName: \"kubernetes.io/projected/a18402db-2952-4cfb-bf3a-1f22f23483de-kube-api-access-lvhhp\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.562970 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563013 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563050 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62176145-6d99-48fb-866e-2c248f531ae5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563091 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbvsx\" (UniqueName: \"kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563139 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563144 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563363 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-config\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563389 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh8lr\" (UniqueName: \"kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563419 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 
crc kubenswrapper[4876]: I1215 06:53:36.563421 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563456 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-client\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563482 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563509 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563534 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7lvq\" (UniqueName: \"kubernetes.io/projected/a4b60d32-905c-4dfe-b8b3-49080372e76b-kube-api-access-r7lvq\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563573 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-encryption-config\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563597 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0e84c5be-bb73-42da-b000-529df9355a16-metrics-tls\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563619 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6defe56-040f-4e05-9b36-28b7d70481df-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563678 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") pod 
\"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563708 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563741 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlkh2\" (UniqueName: \"kubernetes.io/projected/62176145-6d99-48fb-866e-2c248f531ae5-kube-api-access-dlkh2\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563775 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563801 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563832 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-encryption-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563885 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563945 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563970 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.563989 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-node-pullsecrets\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564018 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-client\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564034 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62176145-6d99-48fb-866e-2c248f531ae5-serving-cert\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564049 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564067 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564087 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c2cb\" (UniqueName: \"kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564132 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmnw2\" (UniqueName: \"kubernetes.io/projected/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-kube-api-access-bmnw2\") pod 
\"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564154 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-serving-cert\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564184 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-images\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564205 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564301 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bhkz\" (UniqueName: \"kubernetes.io/projected/b07093d2-2832-4d8b-b80b-d0caf910e6b9-kube-api-access-9bhkz\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564440 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564464 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgt4g\" (UniqueName: \"kubernetes.io/projected/69946ceb-62be-4cf4-a350-d5e37ad74eaf-kube-api-access-lgt4g\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564512 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564576 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") pod 
\"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564628 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-config\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564651 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-dir\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69946ceb-62be-4cf4-a350-d5e37ad74eaf-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564725 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564779 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564795 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4b60d32-905c-4dfe-b8b3-49080372e76b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.564955 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-t872l"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.565642 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.566749 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.567936 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68"] Dec 15 06:53:36 
crc kubenswrapper[4876]: I1215 06:53:36.569135 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cqvrf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.569979 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q7skf"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.570996 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.572176 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.573413 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kqs9w"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.576599 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-dr96f"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.577477 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.578764 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.579137 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.580442 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bkzhk"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.582744 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.582784 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-72p8b"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.584401 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fqp2q"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.584503 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.585324 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.587233 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-48ggq"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.587314 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.590050 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.591262 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.601456 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-znkl5"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.604658 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.609276 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.632736 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.633152 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.635599 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.635994 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.638186 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.644651 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.646747 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.647975 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.655282 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.655340 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.655351 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-f28h8"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.656812 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-72p8b"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.657766 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x2jnz"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.659378 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-gnrsz"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.660005 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.660127 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.660141 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-qxz8x"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.661123 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qxz8x"] Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.661186 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665394 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-serving-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665429 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-service-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665453 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665472 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665488 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665516 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665535 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-serving-cert\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665555 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665577 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665604 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-service-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665624 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665644 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-serving-cert\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665665 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7cwx\" (UniqueName: \"kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: 
\"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665686 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665705 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fbba828-5587-4284-b497-12d70cf1b44f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665735 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665756 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phfvg\" (UniqueName: \"kubernetes.io/projected/0e84c5be-bb73-42da-b000-529df9355a16-kube-api-access-phfvg\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665777 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r78gp\" (UniqueName: \"kubernetes.io/projected/c6defe56-040f-4e05-9b36-28b7d70481df-kube-api-access-r78gp\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665796 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665816 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8xrh\" (UniqueName: \"kubernetes.io/projected/5fbba828-5587-4284-b497-12d70cf1b44f-kube-api-access-s8xrh\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665835 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-audit\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " 
pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665855 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665875 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lj8dx\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-kube-api-access-lj8dx\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665893 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fbba828-5587-4284-b497-12d70cf1b44f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665912 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-policies\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665929 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-audit-dir\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665948 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665969 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.665989 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-image-import-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666008 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-lvhhp\" (UniqueName: \"kubernetes.io/projected/a18402db-2952-4cfb-bf3a-1f22f23483de-kube-api-access-lvhhp\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666029 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666050 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666080 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62176145-6d99-48fb-866e-2c248f531ae5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666114 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbvsx\" (UniqueName: \"kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666135 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666167 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-config\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666186 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh8lr\" (UniqueName: \"kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: 
\"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666236 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-client\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666256 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666276 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666297 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7lvq\" (UniqueName: \"kubernetes.io/projected/a4b60d32-905c-4dfe-b8b3-49080372e76b-kube-api-access-r7lvq\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666319 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-encryption-config\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666341 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0e84c5be-bb73-42da-b000-529df9355a16-metrics-tls\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666362 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6defe56-040f-4e05-9b36-28b7d70481df-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666385 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666404 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666424 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlkh2\" (UniqueName: \"kubernetes.io/projected/62176145-6d99-48fb-866e-2c248f531ae5-kube-api-access-dlkh2\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666444 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666463 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666483 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-encryption-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666504 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666523 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666542 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666580 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-node-pullsecrets\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666599 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-client\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666619 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62176145-6d99-48fb-866e-2c248f531ae5-serving-cert\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666638 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666659 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666701 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c2cb\" (UniqueName: \"kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666722 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmnw2\" (UniqueName: \"kubernetes.io/projected/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-kube-api-access-bmnw2\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666742 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-serving-cert\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc 
kubenswrapper[4876]: I1215 06:53:36.666763 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-images\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666773 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-serving-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666782 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666872 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-config\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666878 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-service-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666910 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bhkz\" (UniqueName: \"kubernetes.io/projected/b07093d2-2832-4d8b-b80b-d0caf910e6b9-kube-api-access-9bhkz\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666940 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666968 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgt4g\" (UniqueName: \"kubernetes.io/projected/69946ceb-62be-4cf4-a350-d5e37ad74eaf-kube-api-access-lgt4g\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.666999 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667034 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667057 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-config\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667079 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-dir\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667120 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69946ceb-62be-4cf4-a350-d5e37ad74eaf-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667150 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czhnv\" (UniqueName: \"kubernetes.io/projected/be980b8d-1606-4294-8263-1df678c279fb-kube-api-access-czhnv\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667179 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-etcd-client\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667229 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667253 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4b60d32-905c-4dfe-b8b3-49080372e76b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667276 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-serving-cert\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.667305 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.668646 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-dir\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.669023 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-audit\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.669290 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.669351 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.669536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670025 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-config\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670065 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670143 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670149 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670879 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.670901 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fbba828-5587-4284-b497-12d70cf1b44f-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.671252 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.671437 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-audit-policies\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.671475 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-audit-dir\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.671875 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62176145-6d99-48fb-866e-2c248f531ae5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672170 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672436 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-serving-cert\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672495 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a18402db-2952-4cfb-bf3a-1f22f23483de-node-pullsecrets\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672605 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.672813 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.673663 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-config\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: 
\"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.673724 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.673998 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c6defe56-040f-4e05-9b36-28b7d70481df-images\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.674031 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.674140 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.675377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.676434 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c6defe56-040f-4e05-9b36-28b7d70481df-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.676628 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.676702 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4b60d32-905c-4dfe-b8b3-49080372e76b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.677065 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.677083 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-etcd-client\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.677081 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.677322 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.678681 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.679942 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-encryption-config\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.679996 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.680762 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a18402db-2952-4cfb-bf3a-1f22f23483de-image-import-ca\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.682071 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.682242 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-encryption-config\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: 
\"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.682318 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/69946ceb-62be-4cf4-a350-d5e37ad74eaf-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.682574 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62176145-6d99-48fb-866e-2c248f531ae5-serving-cert\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.682655 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.683486 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-serving-cert\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.683719 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.684227 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a18402db-2952-4cfb-bf3a-1f22f23483de-serving-cert\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.684264 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0e84c5be-bb73-42da-b000-529df9355a16-metrics-tls\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.684360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.685996 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/b07093d2-2832-4d8b-b80b-d0caf910e6b9-etcd-client\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.686797 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.688519 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.688794 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5fbba828-5587-4284-b497-12d70cf1b44f-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.691320 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.700157 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.705125 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.705133 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.705165 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.718479 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.738661 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.758927 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.767818 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-config\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.767878 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czhnv\" (UniqueName: \"kubernetes.io/projected/be980b8d-1606-4294-8263-1df678c279fb-kube-api-access-czhnv\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.767897 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-etcd-client\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.767914 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-serving-cert\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.767944 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-service-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.768006 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.768490 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-config\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.768699 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" 
(UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.768759 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/be980b8d-1606-4294-8263-1df678c279fb-etcd-service-ca\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.771352 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-serving-cert\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.772180 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/be980b8d-1606-4294-8263-1df678c279fb-etcd-client\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.798610 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.818412 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.839633 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.859480 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.879594 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.899437 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.919205 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.938582 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.959206 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.978715 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 15 06:53:36 crc kubenswrapper[4876]: I1215 06:53:36.998889 4876 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.019226 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.039498 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.058985 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.079962 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.100750 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.119854 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.139396 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.164739 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.178885 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.199222 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.230245 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.239949 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.260780 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.279099 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.299778 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.321412 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.339761 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.359694 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 15 
06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.379498 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.399296 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.420180 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.439017 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.459940 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.480289 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.502299 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.519545 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.538794 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.558055 4876 request.go:700] Waited for 1.018971304s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.559602 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.578935 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.600097 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.618736 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.639084 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.659794 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.670569 4876 secret.go:188] Couldn't get secret 
openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.670844 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert podName:b020cb85-67cf-46ad-8909-823c4a13376e nodeName:}" failed. No retries permitted until 2025-12-15 06:53:38.170814676 +0000 UTC m=+143.741957627 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert") pod "controller-manager-879f6c89f-6pnt6" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e") : failed to sync secret cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.672840 4876 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.672870 4876 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.672938 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca podName:b020cb85-67cf-46ad-8909-823c4a13376e nodeName:}" failed. No retries permitted until 2025-12-15 06:53:38.172909394 +0000 UTC m=+143.744052345 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca") pod "controller-manager-879f6c89f-6pnt6" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e") : failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.672965 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config podName:b020cb85-67cf-46ad-8909-823c4a13376e nodeName:}" failed. No retries permitted until 2025-12-15 06:53:38.172951835 +0000 UTC m=+143.744094776 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config") pod "controller-manager-879f6c89f-6pnt6" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e") : failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.674251 4876 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.674345 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert podName:a4b60d32-905c-4dfe-b8b3-49080372e76b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:38.174320043 +0000 UTC m=+143.745462994 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-mhrtf" (UID: "a4b60d32-905c-4dfe-b8b3-49080372e76b") : failed to sync secret cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.675839 4876 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: E1215 06:53:37.675943 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles podName:b020cb85-67cf-46ad-8909-823c4a13376e nodeName:}" failed. No retries permitted until 2025-12-15 06:53:38.175921639 +0000 UTC m=+143.747064600 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles") pod "controller-manager-879f6c89f-6pnt6" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e") : failed to sync configmap cache: timed out waiting for the condition Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.680500 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.699082 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.719642 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.740055 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.759677 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.780429 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.800085 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.819443 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.839614 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.859018 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.878989 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.900277 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.919793 
4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.951292 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.958779 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.980513 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 15 06:53:37 crc kubenswrapper[4876]: I1215 06:53:37.999915 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.020714 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.039985 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.059981 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.080397 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.100127 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.118701 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.139856 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.158888 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.179307 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.193226 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.193688 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.194028 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.194607 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.194917 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.200439 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.219164 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.259478 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.279020 4876 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.300311 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.321530 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.339530 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.359527 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.378593 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.399755 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.419296 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.471667 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8xrh\" (UniqueName: \"kubernetes.io/projected/5fbba828-5587-4284-b497-12d70cf1b44f-kube-api-access-s8xrh\") pod \"openshift-controller-manager-operator-756b6f6bc6-2ngb7\" (UID: \"5fbba828-5587-4284-b497-12d70cf1b44f\") " 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.474960 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bhkz\" (UniqueName: \"kubernetes.io/projected/b07093d2-2832-4d8b-b80b-d0caf910e6b9-kube-api-access-9bhkz\") pod \"apiserver-7bbb656c7d-vlh5c\" (UID: \"b07093d2-2832-4d8b-b80b-d0caf910e6b9\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.494915 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgt4g\" (UniqueName: \"kubernetes.io/projected/69946ceb-62be-4cf4-a350-d5e37ad74eaf-kube-api-access-lgt4g\") pod \"cluster-samples-operator-665b6dd947-9cvz2\" (UID: \"69946ceb-62be-4cf4-a350-d5e37ad74eaf\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.519625 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7lvq\" (UniqueName: \"kubernetes.io/projected/a4b60d32-905c-4dfe-b8b3-49080372e76b-kube-api-access-r7lvq\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.535011 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lj8dx\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-kube-api-access-lj8dx\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.556704 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbvsx\" (UniqueName: \"kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx\") pod \"console-f9d7485db-x6r8c\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.572898 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmnw2\" (UniqueName: \"kubernetes.io/projected/6405a4d3-c1bb-4cdd-99ae-fd1382abbd01-kube-api-access-bmnw2\") pod \"authentication-operator-69f744f599-dr96f\" (UID: \"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.577950 4876 request.go:700] Waited for 1.905397886s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-config-operator/serviceaccounts/openshift-config-operator/token Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.594725 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlkh2\" (UniqueName: \"kubernetes.io/projected/62176145-6d99-48fb-866e-2c248f531ae5-kube-api-access-dlkh2\") pod \"openshift-config-operator-7777fb866f-f28h8\" (UID: \"62176145-6d99-48fb-866e-2c248f531ae5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.598724 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.639353 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh8lr\" (UniqueName: \"kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr\") pod \"oauth-openshift-558db77b4-bkzhk\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.648715 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.659958 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.663680 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvhhp\" (UniqueName: \"kubernetes.io/projected/a18402db-2952-4cfb-bf3a-1f22f23483de-kube-api-access-lvhhp\") pod \"apiserver-76f77b778f-kqs9w\" (UID: \"a18402db-2952-4cfb-bf3a-1f22f23483de\") " pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.666896 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.673576 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ec0397d-8db5-44e7-a654-bd08ec4e7ed4-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-x94pn\" (UID: \"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.686674 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.701697 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.702161 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7cwx\" (UniqueName: \"kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx\") pod \"route-controller-manager-6576b87f9c-ww7lh\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.718692 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r78gp\" (UniqueName: \"kubernetes.io/projected/c6defe56-040f-4e05-9b36-28b7d70481df-kube-api-access-r78gp\") pod \"machine-api-operator-5694c8668f-fztl2\" (UID: \"c6defe56-040f-4e05-9b36-28b7d70481df\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.719535 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.733937 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phfvg\" (UniqueName: \"kubernetes.io/projected/0e84c5be-bb73-42da-b000-529df9355a16-kube-api-access-phfvg\") pod \"dns-operator-744455d44c-48ggq\" (UID: \"0e84c5be-bb73-42da-b000-529df9355a16\") " pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.742755 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.746359 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.760337 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.771031 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.787890 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.800778 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.844325 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czhnv\" (UniqueName: \"kubernetes.io/projected/be980b8d-1606-4294-8263-1df678c279fb-kube-api-access-czhnv\") pod \"etcd-operator-b45778765-t872l\" (UID: \"be980b8d-1606-4294-8263-1df678c279fb\") " pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.860361 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.862631 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c"] Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.866557 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.873565 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4b60d32-905c-4dfe-b8b3-49080372e76b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mhrtf\" (UID: \"a4b60d32-905c-4dfe-b8b3-49080372e76b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.888124 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.896349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.899555 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.914006 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.919789 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.927051 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.934318 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.941508 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.961373 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.968255 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c2cb\" (UniqueName: \"kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.978386 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.981759 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 15 06:53:38 crc kubenswrapper[4876]: I1215 06:53:38.986043 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") pod \"controller-manager-879f6c89f-6pnt6\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.001320 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108143 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-proxy-tls\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108206 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4c9j\" (UniqueName: \"kubernetes.io/projected/eb940030-24f8-4f6d-990b-7787f4eacf13-kube-api-access-x4c9j\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108236 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bfe06425-73ac-4753-bdba-7d1f1797b4c6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108277 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8zbb\" (UniqueName: \"kubernetes.io/projected/8c73655f-0c0a-49a9-83a6-b8e743618809-kube-api-access-t8zbb\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108317 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-webhook-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108342 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32f3652-3eaf-46ee-a25c-378d553251a7-config\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108364 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e1616af-82dc-469c-8ecb-849c3122fe02-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108419 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvb48\" (UniqueName: \"kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108443 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108465 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108488 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a95b0c4e-e837-4693-96a6-78074ff5278d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108526 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32f3652-3eaf-46ee-a25c-378d553251a7-serving-cert\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108918 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1616af-82dc-469c-8ecb-849c3122fe02-config\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.108969 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-srv-cert\") pod 
\"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109000 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1616af-82dc-469c-8ecb-849c3122fe02-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109421 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109486 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8c73655f-0c0a-49a9-83a6-b8e743618809-tmpfs\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109512 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq2np\" (UniqueName: \"kubernetes.io/projected/439195f3-d325-42cc-a3cb-094ddf386f33-kube-api-access-kq2np\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109535 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5pgx\" (UniqueName: \"kubernetes.io/projected/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-kube-api-access-t5pgx\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109566 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-images\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109635 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gzkh\" (UniqueName: \"kubernetes.io/projected/62cc24e0-8017-4498-b745-b196f50e104f-kube-api-access-6gzkh\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5jbk\" (UniqueName: 
\"kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109687 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc85h\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-kube-api-access-jc85h\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109734 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a95b0c4e-e837-4693-96a6-78074ff5278d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.109820 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q55wq\" (UniqueName: \"kubernetes.io/projected/916c0070-57cd-49dd-bf38-97bfd7bc949f-kube-api-access-q55wq\") pod \"migrator-59844c95c7-g6q46\" (UID: \"916c0070-57cd-49dd-bf38-97bfd7bc949f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110227 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110412 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68787888-eea8-4079-89a0-6718cabcd14b-trusted-ca\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110711 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-default-certificate\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110783 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110831 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l86t\" (UniqueName: \"kubernetes.io/projected/6bfe1983-046b-4e1f-a36a-c73834069529-kube-api-access-9l86t\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110888 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6ntj\" (UniqueName: \"kubernetes.io/projected/a7ef4763-aa60-4f6d-8066-86746ef01427-kube-api-access-m6ntj\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.110958 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111002 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111086 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111156 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6bfe1983-046b-4e1f-a36a-c73834069529-signing-cabundle\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111186 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111210 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-config\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111237 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63114fa-711e-4cb6-b222-d0e317e19488-serving-cert\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vpdm\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111666 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb940030-24f8-4f6d-990b-7787f4eacf13-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111732 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47zbj\" (UniqueName: \"kubernetes.io/projected/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-kube-api-access-47zbj\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.111759 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8fdd\" (UniqueName: \"kubernetes.io/projected/66c67ba0-faba-44ed-afc3-6dfcad78a13b-kube-api-access-m8fdd\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112371 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/62cc24e0-8017-4498-b745-b196f50e104f-machine-approver-tls\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112405 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-metrics-certs\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112430 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2hxm\" (UniqueName: 
\"kubernetes.io/projected/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-kube-api-access-l2hxm\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112458 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112568 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-proxy-tls\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112595 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a95b0c4e-e837-4693-96a6-78074ff5278d-config\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112668 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-auth-proxy-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112692 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/439195f3-d325-42cc-a3cb-094ddf386f33-cert\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112729 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c69b657a-1f19-4576-b07d-9ae5983c0d86-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112752 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-profile-collector-cert\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112843 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"signing-key\" (UniqueName: \"kubernetes.io/secret/6bfe1983-046b-4e1f-a36a-c73834069529-signing-key\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112870 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112910 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nftv\" (UniqueName: \"kubernetes.io/projected/edee3ad2-c1e0-4331-a30b-71e541f430af-kube-api-access-7nftv\") pod \"downloads-7954f5f757-cqvrf\" (UID: \"edee3ad2-c1e0-4331-a30b-71e541f430af\") " pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112934 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.112995 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113198 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-stats-auth\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113218 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/68787888-eea8-4079-89a0-6718cabcd14b-metrics-tls\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c69b657a-1f19-4576-b07d-9ae5983c0d86-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113279 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-trusted-ca\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113327 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.113357 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-service-ca-bundle\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.114003 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115353 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115396 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94f7g\" (UniqueName: \"kubernetes.io/projected/d32f3652-3eaf-46ee-a25c-378d553251a7-kube-api-access-94f7g\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115496 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snvdp\" (UniqueName: \"kubernetes.io/projected/e63114fa-711e-4cb6-b222-d0e317e19488-kube-api-access-snvdp\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115524 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvjdn\" (UniqueName: \"kubernetes.io/projected/c69b657a-1f19-4576-b07d-9ae5983c0d86-kube-api-access-zvjdn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtgrt\" (UniqueName: \"kubernetes.io/projected/bfe06425-73ac-4753-bdba-7d1f1797b4c6-kube-api-access-rtgrt\") pod \"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115792 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.115949 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.116056 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.116312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-apiservice-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.116392 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-srv-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.116473 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhg8v\" (UniqueName: \"kubernetes.io/projected/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-kube-api-access-jhg8v\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.117155 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:39.617135426 +0000 UTC m=+145.188278347 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.127366 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.143632 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fztl2"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.170257 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.174732 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-dr96f"] Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.181263 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6defe56_040f_4e05_9b36_28b7d70481df.slice/crio-758a4c826c01bce781bdbce389a855ed399cb7e51a5e9dd832af3bce7d8fa172 WatchSource:0}: Error finding container 758a4c826c01bce781bdbce389a855ed399cb7e51a5e9dd832af3bce7d8fa172: Status 404 returned error can't find the container with id 758a4c826c01bce781bdbce389a855ed399cb7e51a5e9dd832af3bce7d8fa172 Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.188859 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7"] Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.195533 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4771573b_f753_4de4_bfb5_7fe3608a0b53.slice/crio-d0cb08802b7a56540e1a9f04d0e1677eea87ca0a806d778deda1039202b455a2 WatchSource:0}: Error finding container d0cb08802b7a56540e1a9f04d0e1677eea87ca0a806d778deda1039202b455a2: Status 404 returned error can't find the container with id d0cb08802b7a56540e1a9f04d0e1677eea87ca0a806d778deda1039202b455a2 Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.200993 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6405a4d3_c1bb_4cdd_99ae_fd1382abbd01.slice/crio-4dcd73eb88f9425ba947792635e1fbc302154e443e3ac378b50bf1ce9307422e WatchSource:0}: Error finding container 4dcd73eb88f9425ba947792635e1fbc302154e443e3ac378b50bf1ce9307422e: Status 404 returned error can't find the container with id 4dcd73eb88f9425ba947792635e1fbc302154e443e3ac378b50bf1ce9307422e Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.204688 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217049 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217234 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6bfe1983-046b-4e1f-a36a-c73834069529-signing-cabundle\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217272 
4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217315 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-config\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217336 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63114fa-711e-4cb6-b222-d0e317e19488-serving-cert\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217358 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vpdm\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217380 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb940030-24f8-4f6d-990b-7787f4eacf13-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217408 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9h4g\" (UniqueName: \"kubernetes.io/projected/239647d8-4ba9-475a-872b-2b308b1440da-kube-api-access-j9h4g\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47zbj\" (UniqueName: \"kubernetes.io/projected/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-kube-api-access-47zbj\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217494 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8fdd\" (UniqueName: \"kubernetes.io/projected/66c67ba0-faba-44ed-afc3-6dfcad78a13b-kube-api-access-m8fdd\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217518 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/62cc24e0-8017-4498-b745-b196f50e104f-machine-approver-tls\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217539 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-metrics-certs\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217562 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2hxm\" (UniqueName: \"kubernetes.io/projected/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-kube-api-access-l2hxm\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217587 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217638 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-proxy-tls\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217692 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a95b0c4e-e837-4693-96a6-78074ff5278d-config\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.217694 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fbba828_5587_4284_b497_12d70cf1b44f.slice/crio-ecbe13ccf296a1bc72997c840f20b41e247cb04764bdd9b723f884130cfbd86b WatchSource:0}: Error finding container ecbe13ccf296a1bc72997c840f20b41e247cb04764bdd9b723f884130cfbd86b: Status 404 returned error can't find the container with id ecbe13ccf296a1bc72997c840f20b41e247cb04764bdd9b723f884130cfbd86b Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217718 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-metrics-tls\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217752 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-auth-proxy-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217781 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/439195f3-d325-42cc-a3cb-094ddf386f33-cert\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217801 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-config-volume\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217831 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c69b657a-1f19-4576-b07d-9ae5983c0d86-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217859 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-profile-collector-cert\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217907 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6bfe1983-046b-4e1f-a36a-c73834069529-signing-key\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217931 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-certs\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217954 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.217976 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nftv\" (UniqueName: \"kubernetes.io/projected/edee3ad2-c1e0-4331-a30b-71e541f430af-kube-api-access-7nftv\") pod \"downloads-7954f5f757-cqvrf\" (UID: \"edee3ad2-c1e0-4331-a30b-71e541f430af\") " pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 
15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218002 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218072 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218092 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-stats-auth\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218139 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/68787888-eea8-4079-89a0-6718cabcd14b-metrics-tls\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-plugins-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218195 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqfmz\" (UniqueName: \"kubernetes.io/projected/10fa0e8c-4318-4e91-9a17-98b12f8622b3-kube-api-access-zqfmz\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218233 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c69b657a-1f19-4576-b07d-9ae5983c0d86-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218257 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-trusted-ca\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218305 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-service-ca-bundle\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218347 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218370 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94f7g\" (UniqueName: \"kubernetes.io/projected/d32f3652-3eaf-46ee-a25c-378d553251a7-kube-api-access-94f7g\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218390 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snvdp\" (UniqueName: \"kubernetes.io/projected/e63114fa-711e-4cb6-b222-d0e317e19488-kube-api-access-snvdp\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218411 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvjdn\" (UniqueName: \"kubernetes.io/projected/c69b657a-1f19-4576-b07d-9ae5983c0d86-kube-api-access-zvjdn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218445 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtgrt\" (UniqueName: \"kubernetes.io/projected/bfe06425-73ac-4753-bdba-7d1f1797b4c6-kube-api-access-rtgrt\") pod \"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218469 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-node-bootstrap-token\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218503 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218556 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218582 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-apiservice-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218605 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-srv-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218642 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhg8v\" (UniqueName: \"kubernetes.io/projected/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-kube-api-access-jhg8v\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-proxy-tls\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218718 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4c9j\" (UniqueName: \"kubernetes.io/projected/eb940030-24f8-4f6d-990b-7787f4eacf13-kube-api-access-x4c9j\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218749 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bfe06425-73ac-4753-bdba-7d1f1797b4c6-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218786 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8zbb\" (UniqueName: \"kubernetes.io/projected/8c73655f-0c0a-49a9-83a6-b8e743618809-kube-api-access-t8zbb\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218812 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-webhook-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218835 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32f3652-3eaf-46ee-a25c-378d553251a7-config\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218859 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e1616af-82dc-469c-8ecb-849c3122fe02-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-mountpoint-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218909 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvb48\" (UniqueName: \"kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.218995 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a95b0c4e-e837-4693-96a6-78074ff5278d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219047 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32f3652-3eaf-46ee-a25c-378d553251a7-serving-cert\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219071 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-socket-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219146 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1616af-82dc-469c-8ecb-849c3122fe02-config\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219173 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-srv-cert\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219211 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1616af-82dc-469c-8ecb-849c3122fe02-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219277 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219318 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8c73655f-0c0a-49a9-83a6-b8e743618809-tmpfs\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219344 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq2np\" (UniqueName: \"kubernetes.io/projected/439195f3-d325-42cc-a3cb-094ddf386f33-kube-api-access-kq2np\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219369 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5pgx\" (UniqueName: \"kubernetes.io/projected/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-kube-api-access-t5pgx\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219396 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gzkh\" (UniqueName: \"kubernetes.io/projected/62cc24e0-8017-4498-b745-b196f50e104f-kube-api-access-6gzkh\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5jbk\" (UniqueName: \"kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219457 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-images\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219490 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc85h\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-kube-api-access-jc85h\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219529 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a95b0c4e-e837-4693-96a6-78074ff5278d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219556 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q55wq\" (UniqueName: \"kubernetes.io/projected/916c0070-57cd-49dd-bf38-97bfd7bc949f-kube-api-access-q55wq\") pod \"migrator-59844c95c7-g6q46\" (UID: \"916c0070-57cd-49dd-bf38-97bfd7bc949f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.219579 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220495 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68787888-eea8-4079-89a0-6718cabcd14b-trusted-ca\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220589 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220613 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-default-certificate\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220640 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220708 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-registration-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220753 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l86t\" (UniqueName: \"kubernetes.io/projected/6bfe1983-046b-4e1f-a36a-c73834069529-kube-api-access-9l86t\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220779 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6ntj\" (UniqueName: \"kubernetes.io/projected/a7ef4763-aa60-4f6d-8066-86746ef01427-kube-api-access-m6ntj\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220809 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") 
" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220868 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220898 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtq8k\" (UniqueName: \"kubernetes.io/projected/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-kube-api-access-jtq8k\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220929 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.220963 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-csi-data-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.228548 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-trusted-ca\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.228682 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.230423 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:39.730318008 +0000 UTC m=+145.301460929 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.232258 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d32f3652-3eaf-46ee-a25c-378d553251a7-serving-cert\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.234905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e1616af-82dc-469c-8ecb-849c3122fe02-config\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.235603 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.235991 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.236164 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-metrics-certs\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.238162 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.241327 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d32f3652-3eaf-46ee-a25c-378d553251a7-config\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.241469 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/62cc24e0-8017-4498-b745-b196f50e104f-machine-approver-tls\") pod 
\"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.243540 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.249914 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-service-ca-bundle\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.247441 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/68787888-eea8-4079-89a0-6718cabcd14b-trusted-ca\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.244403 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.251306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.251939 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-srv-cert\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.252377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8c73655f-0c0a-49a9-83a6-b8e743618809-tmpfs\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.252420 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/bfe06425-73ac-4753-bdba-7d1f1797b4c6-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 
06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.253150 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-proxy-tls\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.253448 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-images\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.254750 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a95b0c4e-e837-4693-96a6-78074ff5278d-config\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.261979 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.262782 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7e1616af-82dc-469c-8ecb-849c3122fe02-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.264294 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/62cc24e0-8017-4498-b745-b196f50e104f-auth-proxy-config\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.266882 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/68787888-eea8-4079-89a0-6718cabcd14b-metrics-tls\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.267465 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-proxy-tls\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.267895 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.269144 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb940030-24f8-4f6d-990b-7787f4eacf13-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.269833 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c69b657a-1f19-4576-b07d-9ae5983c0d86-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.271724 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.303266 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a95b0c4e-e837-4693-96a6-78074ff5278d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.306831 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-apiservice-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.307435 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.310607 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/6bfe1983-046b-4e1f-a36a-c73834069529-signing-cabundle\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.314320 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets\") pod \"image-registry-697d97f7c8-7k6p8\" 
(UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.318986 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.319037 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-48ggq"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.320234 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/439195f3-d325-42cc-a3cb-094ddf386f33-cert\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.321890 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.322129 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.324327 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c69b657a-1f19-4576-b07d-9ae5983c0d86-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.325352 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.325589 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-default-certificate\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.325980 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c73655f-0c0a-49a9-83a6-b8e743618809-webhook-cert\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.295717 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e63114fa-711e-4cb6-b222-d0e317e19488-config\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.327955 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/66c67ba0-faba-44ed-afc3-6dfcad78a13b-srv-cert\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.328658 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63114fa-711e-4cb6-b222-d0e317e19488-serving-cert\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.328771 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/6bfe1983-046b-4e1f-a36a-c73834069529-signing-key\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.332923 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-mountpoint-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333007 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-socket-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333070 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-mountpoint-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333127 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-f28h8"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333528 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-socket-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333630 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-registration-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc 
kubenswrapper[4876]: I1215 06:53:39.333801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-registration-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333848 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333007 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-stats-auth\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333917 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtq8k\" (UniqueName: \"kubernetes.io/projected/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-kube-api-access-jtq8k\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.333955 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-csi-data-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334137 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8fdd\" (UniqueName: \"kubernetes.io/projected/66c67ba0-faba-44ed-afc3-6dfcad78a13b-kube-api-access-m8fdd\") pod \"olm-operator-6b444d44fb-hz5lw\" (UID: \"66c67ba0-faba-44ed-afc3-6dfcad78a13b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334153 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-csi-data-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334034 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9h4g\" (UniqueName: \"kubernetes.io/projected/239647d8-4ba9-475a-872b-2b308b1440da-kube-api-access-j9h4g\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334463 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a7ef4763-aa60-4f6d-8066-86746ef01427-profile-collector-cert\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-metrics-tls\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-config-volume\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334594 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-certs\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334735 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-plugins-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334805 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqfmz\" (UniqueName: \"kubernetes.io/projected/10fa0e8c-4318-4e91-9a17-98b12f8622b3-kube-api-access-zqfmz\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.334899 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-node-bootstrap-token\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.335897 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:39.835882766 +0000 UTC m=+145.407025677 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.335902 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/239647d8-4ba9-475a-872b-2b308b1440da-plugins-dir\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.336205 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bkzhk"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.336627 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-config-volume\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.338257 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.340550 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-node-bootstrap-token\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.342416 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/10fa0e8c-4318-4e91-9a17-98b12f8622b3-certs\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.354671 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhg8v\" (UniqueName: \"kubernetes.io/projected/631b168a-5a6c-4c8e-9854-52ab4c74d9a3-kube-api-access-jhg8v\") pod \"router-default-5444994796-p5nck\" (UID: \"631b168a-5a6c-4c8e-9854-52ab4c74d9a3\") " pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.356510 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-metrics-tls\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.358368 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94f7g\" (UniqueName: \"kubernetes.io/projected/d32f3652-3eaf-46ee-a25c-378d553251a7-kube-api-access-94f7g\") pod \"service-ca-operator-777779d784-xlh8f\" (UID: \"d32f3652-3eaf-46ee-a25c-378d553251a7\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.359076 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snvdp\" (UniqueName: \"kubernetes.io/projected/e63114fa-711e-4cb6-b222-d0e317e19488-kube-api-access-snvdp\") pod \"console-operator-58897d9998-q7skf\" (UID: \"e63114fa-711e-4cb6-b222-d0e317e19488\") " pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.362596 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2hxm\" (UniqueName: \"kubernetes.io/projected/c89ff00c-fdde-4c6d-b849-43061c3c6b2c-kube-api-access-l2hxm\") pod \"package-server-manager-789f6589d5-x277g\" (UID: \"c89ff00c-fdde-4c6d-b849-43061c3c6b2c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.374084 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvjdn\" (UniqueName: \"kubernetes.io/projected/c69b657a-1f19-4576-b07d-9ae5983c0d86-kube-api-access-zvjdn\") pod \"kube-storage-version-migrator-operator-b67b599dd-l7cgr\" (UID: \"c69b657a-1f19-4576-b07d-9ae5983c0d86\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.375537 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4c9j\" (UniqueName: \"kubernetes.io/projected/eb940030-24f8-4f6d-990b-7787f4eacf13-kube-api-access-x4c9j\") pod \"multus-admission-controller-857f4d67dd-znkl5\" (UID: \"eb940030-24f8-4f6d-990b-7787f4eacf13\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.406864 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtgrt\" (UniqueName: \"kubernetes.io/projected/bfe06425-73ac-4753-bdba-7d1f1797b4c6-kube-api-access-rtgrt\") pod \"control-plane-machine-set-operator-78cbb6b69f-kk5bp\" (UID: \"bfe06425-73ac-4753-bdba-7d1f1797b4c6\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.418809 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8zbb\" (UniqueName: \"kubernetes.io/projected/8c73655f-0c0a-49a9-83a6-b8e743618809-kube-api-access-t8zbb\") pod \"packageserver-d55dfcdfc-c8vw5\" (UID: \"8c73655f-0c0a-49a9-83a6-b8e743618809\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.423778 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.427513 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.435437 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.436111 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:39.936081854 +0000 UTC m=+145.507224765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.439932 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kqs9w"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.463606 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q55wq\" (UniqueName: \"kubernetes.io/projected/916c0070-57cd-49dd-bf38-97bfd7bc949f-kube-api-access-q55wq\") pod \"migrator-59844c95c7-g6q46\" (UID: \"916c0070-57cd-49dd-bf38-97bfd7bc949f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.466306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a95b0c4e-e837-4693-96a6-78074ff5278d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-95pzl\" (UID: \"a95b0c4e-e837-4693-96a6-78074ff5278d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.475837 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.481807 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.496367 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.501206 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-t872l"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.501340 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.502425 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7e1616af-82dc-469c-8ecb-849c3122fe02-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-w2r68\" (UID: \"7e1616af-82dc-469c-8ecb-849c3122fe02\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.510573 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.514019 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4b60d32_905c_4dfe_b8b3_49080372e76b.slice/crio-a634d08e5996c909495bad719223a1779f55ec297eb04d0723d6f4ac06a40d47 WatchSource:0}: Error finding container a634d08e5996c909495bad719223a1779f55ec297eb04d0723d6f4ac06a40d47: Status 404 returned error can't find the container with id a634d08e5996c909495bad719223a1779f55ec297eb04d0723d6f4ac06a40d47 Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.514620 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.517545 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" event={"ID":"0e84c5be-bb73-42da-b000-529df9355a16","Type":"ContainerStarted","Data":"ab7d133ddc38cd022cf9102654e85292dfe7f27c5ad03cd48224206ebfc38a81"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.518159 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvb48\" (UniqueName: \"kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48\") pod \"collect-profiles-29429685-bzx4c\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.522143 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.529258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" event={"ID":"5fbba828-5587-4284-b497-12d70cf1b44f","Type":"ContainerStarted","Data":"ecbe13ccf296a1bc72997c840f20b41e247cb04764bdd9b723f884130cfbd86b"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.537456 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.537424 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" event={"ID":"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01","Type":"ContainerStarted","Data":"4dcd73eb88f9425ba947792635e1fbc302154e443e3ac378b50bf1ce9307422e"} Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.537826 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.037808835 +0000 UTC m=+145.608951746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.543121 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.549635 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:39 crc kubenswrapper[4876]: W1215 06:53:39.551309 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe980b8d_1606_4294_8263_1df678c279fb.slice/crio-2cd532df793d991b7b618d7ab95274fde75473ecaa881f0cedf79e47cc68e172 WatchSource:0}: Error finding container 2cd532df793d991b7b618d7ab95274fde75473ecaa881f0cedf79e47cc68e172: Status 404 returned error can't find the container with id 2cd532df793d991b7b618d7ab95274fde75473ecaa881f0cedf79e47cc68e172 Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.552391 4876 generic.go:334] "Generic (PLEG): container finished" podID="b07093d2-2832-4d8b-b80b-d0caf910e6b9" containerID="55fcb68ce67ebdcb1c75022c90d5f86b408b5fd33a9adef2a379b4985f733f0e" exitCode=0 Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.552467 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" event={"ID":"b07093d2-2832-4d8b-b80b-d0caf910e6b9","Type":"ContainerDied","Data":"55fcb68ce67ebdcb1c75022c90d5f86b408b5fd33a9adef2a379b4985f733f0e"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.552500 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" event={"ID":"b07093d2-2832-4d8b-b80b-d0caf910e6b9","Type":"ContainerStarted","Data":"e1a1473eed0dd93eeb8831813b3b68904091160da985123e38f9b22fe99889dc"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.555563 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" event={"ID":"a18402db-2952-4cfb-bf3a-1f22f23483de","Type":"ContainerStarted","Data":"b40d1906a1916542d384405818a907c269026c1eb4e06e0487a0727b0ff7b40e"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.557059 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" event={"ID":"69946ceb-62be-4cf4-a350-d5e37ad74eaf","Type":"ContainerStarted","Data":"de9bbe106f1d5ffc6b4c110b339aec48825b340bd88e360aa352f5fcadf8ad42"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.557231 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq2np\" (UniqueName: \"kubernetes.io/projected/439195f3-d325-42cc-a3cb-094ddf386f33-kube-api-access-kq2np\") pod \"ingress-canary-x2jnz\" (UID: \"439195f3-d325-42cc-a3cb-094ddf386f33\") " pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.559153 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" event={"ID":"e6f138ff-658f-41f4-8067-72f1882a25a5","Type":"ContainerStarted","Data":"959659033ceac615e7b79e3e45a2eef5387be62a17119c49ca761ec77c60b571"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.561247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x6r8c" event={"ID":"4771573b-f753-4de4-bfb5-7fe3608a0b53","Type":"ContainerStarted","Data":"d0cb08802b7a56540e1a9f04d0e1677eea87ca0a806d778deda1039202b455a2"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.562428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" 
event={"ID":"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4","Type":"ContainerStarted","Data":"036b7cd5c3cd8667eb9dea33fade1dc6195c7569658acb21c9ac92477bcf1dc3"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.563726 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" event={"ID":"62176145-6d99-48fb-866e-2c248f531ae5","Type":"ContainerStarted","Data":"2261476d22e8e0e9c9f6d06a57e04eccc3571fdce24beea9f9e817d2bc3aa557"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.564799 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.565169 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" event={"ID":"ca3c2acc-5299-4f75-a891-e018580e62ee","Type":"ContainerStarted","Data":"cce4e74467fd38f4408a9952b643a313cb9115f140de0ccbc62f42303fcaafa5"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.572487 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.573297 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5pgx\" (UniqueName: \"kubernetes.io/projected/d7464df4-d82d-44d4-9019-04cbc4cd7dc0-kube-api-access-t5pgx\") pod \"machine-config-controller-84d6567774-lzgq9\" (UID: \"d7464df4-d82d-44d4-9019-04cbc4cd7dc0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.577969 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" event={"ID":"c6defe56-040f-4e05-9b36-28b7d70481df","Type":"ContainerStarted","Data":"537f0fb868411739f71a441d9be63cf7b8e75ff62f4ea5088771067073f0c6f8"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.578006 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" event={"ID":"c6defe56-040f-4e05-9b36-28b7d70481df","Type":"ContainerStarted","Data":"758a4c826c01bce781bdbce389a855ed399cb7e51a5e9dd832af3bce7d8fa172"} Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.580515 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.588993 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-x2jnz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.593672 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gzkh\" (UniqueName: \"kubernetes.io/projected/62cc24e0-8017-4498-b745-b196f50e104f-kube-api-access-6gzkh\") pod \"machine-approver-56656f9798-8nkpf\" (UID: \"62cc24e0-8017-4498-b745-b196f50e104f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.617729 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5jbk\" (UniqueName: \"kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk\") pod \"marketplace-operator-79b997595-kvwdp\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.638787 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.639114 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.139065073 +0000 UTC m=+145.710207984 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.639610 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.640640 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.140631267 +0000 UTC m=+145.711774178 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.660217 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6ntj\" (UniqueName: \"kubernetes.io/projected/a7ef4763-aa60-4f6d-8066-86746ef01427-kube-api-access-m6ntj\") pod \"catalog-operator-68c6474976-9gltw\" (UID: \"a7ef4763-aa60-4f6d-8066-86746ef01427\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.672681 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nftv\" (UniqueName: \"kubernetes.io/projected/edee3ad2-c1e0-4331-a30b-71e541f430af-kube-api-access-7nftv\") pod \"downloads-7954f5f757-cqvrf\" (UID: \"edee3ad2-c1e0-4331-a30b-71e541f430af\") " pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.673318 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.694664 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc85h\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-kube-api-access-jc85h\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.696556 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47zbj\" (UniqueName: \"kubernetes.io/projected/412024f1-f06e-4af5-ab14-d2bf11d7c6d3-kube-api-access-47zbj\") pod \"machine-config-operator-74547568cd-k8hdp\" (UID: \"412024f1-f06e-4af5-ab14-d2bf11d7c6d3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.723005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/68787888-eea8-4079-89a0-6718cabcd14b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-8p2gj\" (UID: \"68787888-eea8-4079-89a0-6718cabcd14b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.733001 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.741159 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.741528 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.742182 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.242157831 +0000 UTC m=+145.813300742 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.742727 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e63fa2a2-b391-4dc5-a3ef-12eed55a4e99-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-h78rt\" (UID: \"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.748494 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.753017 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.755349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vpdm\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.760893 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.774009 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.787431 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.799872 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtq8k\" (UniqueName: \"kubernetes.io/projected/e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc-kube-api-access-jtq8k\") pod \"dns-default-qxz8x\" (UID: \"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc\") " pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.808575 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l86t\" (UniqueName: \"kubernetes.io/projected/6bfe1983-046b-4e1f-a36a-c73834069529-kube-api-access-9l86t\") pod \"service-ca-9c57cc56f-fqp2q\" (UID: \"6bfe1983-046b-4e1f-a36a-c73834069529\") " pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.826850 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9h4g\" (UniqueName: \"kubernetes.io/projected/239647d8-4ba9-475a-872b-2b308b1440da-kube-api-access-j9h4g\") pod \"csi-hostpathplugin-72p8b\" (UID: \"239647d8-4ba9-475a-872b-2b308b1440da\") " pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.831314 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.836855 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.844590 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.845462 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.345376534 +0000 UTC m=+145.916519445 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.855529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqfmz\" (UniqueName: \"kubernetes.io/projected/10fa0e8c-4318-4e91-9a17-98b12f8622b3-kube-api-access-zqfmz\") pod \"machine-config-server-gnrsz\" (UID: \"10fa0e8c-4318-4e91-9a17-98b12f8622b3\") " pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.858194 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.906970 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.914840 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q7skf"] Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.916340 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gnrsz" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.929541 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:39 crc kubenswrapper[4876]: I1215 06:53:39.949015 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:39 crc kubenswrapper[4876]: E1215 06:53:39.949395 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.449378588 +0000 UTC m=+146.020521499 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.050492 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.050802 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.550791641 +0000 UTC m=+146.121934552 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.151584 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.151720 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.651700469 +0000 UTC m=+146.222843380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.152064 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.152381 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.652373207 +0000 UTC m=+146.223516118 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.253161 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.253400 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.753365047 +0000 UTC m=+146.324507958 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.253475 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.253810 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.75379428 +0000 UTC m=+146.324937191 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.312587 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-znkl5"] Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.313927 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp"] Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.346343 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl"] Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.355006 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.355302 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.855258873 +0000 UTC m=+146.426401784 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.355572 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.355937 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.855920212 +0000 UTC m=+146.427063143 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.443447 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw"] Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.457852 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.461782 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:40.961740807 +0000 UTC m=+146.532883718 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.469062 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr"] Dec 15 06:53:40 crc kubenswrapper[4876]: W1215 06:53:40.501752 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb940030_24f8_4f6d_990b_7787f4eacf13.slice/crio-69680263d24e537109bf804e221935934090de78aaba63233b98b893c8173b17 WatchSource:0}: Error finding container 69680263d24e537109bf804e221935934090de78aaba63233b98b893c8173b17: Status 404 returned error can't find the container with id 69680263d24e537109bf804e221935934090de78aaba63233b98b893c8173b17 Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.566790 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.567862 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.06784655 +0000 UTC m=+146.638989451 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.614781 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" event={"ID":"a4b60d32-905c-4dfe-b8b3-49080372e76b","Type":"ContainerStarted","Data":"a634d08e5996c909495bad719223a1779f55ec297eb04d0723d6f4ac06a40d47"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.650022 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x6r8c" event={"ID":"4771573b-f753-4de4-bfb5-7fe3608a0b53","Type":"ContainerStarted","Data":"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.668657 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.668875 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.16883302 +0000 UTC m=+146.739975931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.669177 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.669865 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.169858289 +0000 UTC m=+146.741001190 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.691165 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" event={"ID":"4ec0397d-8db5-44e7-a654-bd08ec4e7ed4","Type":"ContainerStarted","Data":"1a4c3e62565a3dab71a033b9516d61afa97de154a4de805d95f24d5c2cf42be7"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.778584 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.779470 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.779585 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.779624 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.779690 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.781041 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.281012714 +0000 UTC m=+146.852155625 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.799557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.816485 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p5nck" event={"ID":"631b168a-5a6c-4c8e-9854-52ab4c74d9a3","Type":"ContainerStarted","Data":"7ee9545d0bf31860b9a6cb4654eb849aff6417de88a774fc91c7fb721ca12073"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.820255 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.823841 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" event={"ID":"eb940030-24f8-4f6d-990b-7787f4eacf13","Type":"ContainerStarted","Data":"69680263d24e537109bf804e221935934090de78aaba63233b98b893c8173b17"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.826851 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.829626 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.840568 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.845565 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.850534 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.882157 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.882595 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.38258028 +0000 UTC m=+146.953723191 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.904786 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" event={"ID":"69946ceb-62be-4cf4-a350-d5e37ad74eaf","Type":"ContainerStarted","Data":"589bee36919f2b47f6e4e16f558a29f6893029e72dd8f6c062eefd45f46fc402"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.933983 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" event={"ID":"6405a4d3-c1bb-4cdd-99ae-fd1382abbd01","Type":"ContainerStarted","Data":"945f0c36643fa2c8da910cb60ae33193dd573bf25bc571a7cd36c1a4cdf82c0e"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.949522 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" event={"ID":"be980b8d-1606-4294-8263-1df678c279fb","Type":"ContainerStarted","Data":"2cd532df793d991b7b618d7ab95274fde75473ecaa881f0cedf79e47cc68e172"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.957553 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" event={"ID":"bfe06425-73ac-4753-bdba-7d1f1797b4c6","Type":"ContainerStarted","Data":"35bab5519f21d26eb2c0b3ddbb80c757c246109de4201b582b9ae800d1c8ff89"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.960493 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" event={"ID":"0e84c5be-bb73-42da-b000-529df9355a16","Type":"ContainerStarted","Data":"3dee1d2cb99f8879cb7e487851bdd34fba8563f18ad582928a04baf89e3301cc"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.965630 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" event={"ID":"66c67ba0-faba-44ed-afc3-6dfcad78a13b","Type":"ContainerStarted","Data":"94b73fecb1c7be57fcb48847b1416de49560f28878c6fe719830710dbdc44a42"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.981209 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" event={"ID":"ca3c2acc-5299-4f75-a891-e018580e62ee","Type":"ContainerStarted","Data":"2fe889aed73de84debf48f895bda87b0217571fc821e9cd823979b00eefbcefb"} Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.981863 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:40 crc kubenswrapper[4876]: I1215 06:53:40.988632 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:40 crc kubenswrapper[4876]: E1215 06:53:40.992530 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.4925026 +0000 UTC m=+147.063645521 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.025191 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" event={"ID":"c6defe56-040f-4e05-9b36-28b7d70481df","Type":"ContainerStarted","Data":"0a91f1e8b4efae09cd9e4df015d394e545f5cc2f46386d58090f7b350ddb67bb"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.043642 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" event={"ID":"62176145-6d99-48fb-866e-2c248f531ae5","Type":"ContainerStarted","Data":"1b8c70f03cfa386f728a7641fe83d15311e1a46d0706d51ae5273e6ae4a2d74f"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.067091 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" event={"ID":"e6f138ff-658f-41f4-8067-72f1882a25a5","Type":"ContainerStarted","Data":"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.068169 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.082247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" event={"ID":"a95b0c4e-e837-4693-96a6-78074ff5278d","Type":"ContainerStarted","Data":"e2a2557900691797c3bc8cc2be070d2ad2c7243420509a6002d334b8c35ab900"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.095980 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.097852 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.597840173 +0000 UTC m=+147.168983084 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.108189 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q7skf" event={"ID":"e63114fa-711e-4cb6-b222-d0e317e19488","Type":"ContainerStarted","Data":"b8ac70d2c820886efb59e8a588e347b20d070a0699d5502087aaa9c4b72e7fb6"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.135673 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" event={"ID":"5fbba828-5587-4284-b497-12d70cf1b44f","Type":"ContainerStarted","Data":"aa4b41cd6e77c42d4ba01c2c7741bde0da9daeffd6281179ceda25679d509ee9"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.152247 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c"] Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.163226 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" event={"ID":"b020cb85-67cf-46ad-8909-823c4a13376e","Type":"ContainerStarted","Data":"6f1af8ed4a408f02464ac1edd63e1d26f3a54c441c392a71981c8e66871a09a1"} Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.164562 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.169501 4876 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-6pnt6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.169576 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.198150 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.200745 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.700722226 +0000 UTC m=+147.271865137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.353003 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" podStartSLOduration=122.352977873 podStartE2EDuration="2m2.352977873s" podCreationTimestamp="2025-12-15 06:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.352608082 +0000 UTC m=+146.923750993" watchObservedRunningTime="2025-12-15 06:53:41.352977873 +0000 UTC m=+146.924120784" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.353829 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.354412 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.854386182 +0000 UTC m=+147.425529093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.401484 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" podStartSLOduration=123.40145117 podStartE2EDuration="2m3.40145117s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.378540169 +0000 UTC m=+146.949683080" watchObservedRunningTime="2025-12-15 06:53:41.40145117 +0000 UTC m=+146.972594071" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.417920 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fztl2" podStartSLOduration=123.417887361 podStartE2EDuration="2m3.417887361s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.407332186 +0000 UTC m=+146.978475097" watchObservedRunningTime="2025-12-15 06:53:41.417887361 +0000 UTC m=+146.989030272" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.457895 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.458751 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:41.958733156 +0000 UTC m=+147.529876067 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.472659 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-dr96f" podStartSLOduration=123.472642476 podStartE2EDuration="2m3.472642476s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.446181474 +0000 UTC m=+147.017324385" watchObservedRunningTime="2025-12-15 06:53:41.472642476 +0000 UTC m=+147.043785387" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.474058 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-2ngb7" podStartSLOduration=123.474051725 podStartE2EDuration="2m3.474051725s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.471938966 +0000 UTC m=+147.043081887" watchObservedRunningTime="2025-12-15 06:53:41.474051725 +0000 UTC m=+147.045194626" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.495447 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-x94pn" podStartSLOduration=123.495432404 podStartE2EDuration="2m3.495432404s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.49528876 +0000 UTC m=+147.066431671" watchObservedRunningTime="2025-12-15 06:53:41.495432404 +0000 UTC m=+147.066575315" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.560092 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.560600 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.06058553 +0000 UTC m=+147.631728441 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.627401 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" podStartSLOduration=123.627382872 podStartE2EDuration="2m3.627382872s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.626324803 +0000 UTC m=+147.197467724" watchObservedRunningTime="2025-12-15 06:53:41.627382872 +0000 UTC m=+147.198525783" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.627599 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-x6r8c" podStartSLOduration=123.627594518 podStartE2EDuration="2m3.627594518s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:41.58269013 +0000 UTC m=+147.153833041" watchObservedRunningTime="2025-12-15 06:53:41.627594518 +0000 UTC m=+147.198737429" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.660642 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.660975 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.160960824 +0000 UTC m=+147.732103735 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.703965 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.762520 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.762797 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.262785426 +0000 UTC m=+147.833928337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.868704 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.869087 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.369072705 +0000 UTC m=+147.940215616 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.962779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46"] Dec 15 06:53:41 crc kubenswrapper[4876]: I1215 06:53:41.970717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:41 crc kubenswrapper[4876]: E1215 06:53:41.971058 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.471047573 +0000 UTC m=+148.042190484 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.069326 4876 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-bkzhk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.069385 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.073978 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.074490 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.57444539 +0000 UTC m=+148.145588301 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.175642 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.176164 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.676086829 +0000 UTC m=+148.247229740 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.179842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" event={"ID":"a4b60d32-905c-4dfe-b8b3-49080372e76b","Type":"ContainerStarted","Data":"96b08861bc5be53b68bfa3d41cd7ac96cd2c6a5850ad4fabdedcf435abf8da86"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.182137 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" event={"ID":"df271eaa-2f40-4651-9942-5c6e535654d6","Type":"ContainerStarted","Data":"3f5a29d4a02007907f41536500e888fba3ff524f1c00a798df119bc7f88ef29b"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.183527 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" event={"ID":"b020cb85-67cf-46ad-8909-823c4a13376e","Type":"ContainerStarted","Data":"8ff9fa9d97d310380259908ad9942a2dedfd1900b5977b7d12c3466779bbe18b"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.198889 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" event={"ID":"b07093d2-2832-4d8b-b80b-d0caf910e6b9","Type":"ContainerStarted","Data":"f09b292ad0fd7ca845dd8fe2817f53433ec7dcd6e2e26e9e9585ba04ff61ff3a"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.203143 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" event={"ID":"be980b8d-1606-4294-8263-1df678c279fb","Type":"ContainerStarted","Data":"4a3f11dee6d8f8599e6d65c13f65e9c942ae5d8069a6e105c87175ddf27d4bf5"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.208463 4876 generic.go:334] "Generic (PLEG): container finished" 
podID="a18402db-2952-4cfb-bf3a-1f22f23483de" containerID="004955cb85b54d003d2a660f28a50bf72dfc363190b1e9beb0f04f391bcf0a0f" exitCode=0 Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.208578 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" event={"ID":"a18402db-2952-4cfb-bf3a-1f22f23483de","Type":"ContainerDied","Data":"004955cb85b54d003d2a660f28a50bf72dfc363190b1e9beb0f04f391bcf0a0f"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.212199 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" event={"ID":"62cc24e0-8017-4498-b745-b196f50e104f","Type":"ContainerStarted","Data":"9448ef543f3a62f984494356345e5e143a3e1d1930aa0d0451dc24854c2030d3"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.214012 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mhrtf" podStartSLOduration=124.213981921 podStartE2EDuration="2m4.213981921s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:42.211286035 +0000 UTC m=+147.782428986" watchObservedRunningTime="2025-12-15 06:53:42.213981921 +0000 UTC m=+147.785124832" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.217320 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-p5nck" event={"ID":"631b168a-5a6c-4c8e-9854-52ab4c74d9a3","Type":"ContainerStarted","Data":"fa77eabc9f4a09d783b99ddd9ee8d7f03ff80ae84efbdd5f75c9ebb16494fe5b"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.224319 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gnrsz" event={"ID":"10fa0e8c-4318-4e91-9a17-98b12f8622b3","Type":"ContainerStarted","Data":"92502f65585422e99e61b0bb0d53f5dbd1699afd6584482c8609bf82d94fe0b0"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.232507 4876 generic.go:334] "Generic (PLEG): container finished" podID="62176145-6d99-48fb-866e-2c248f531ae5" containerID="1b8c70f03cfa386f728a7641fe83d15311e1a46d0706d51ae5273e6ae4a2d74f" exitCode=0 Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.232626 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" event={"ID":"62176145-6d99-48fb-866e-2c248f531ae5","Type":"ContainerDied","Data":"1b8c70f03cfa386f728a7641fe83d15311e1a46d0706d51ae5273e6ae4a2d74f"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.234341 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" event={"ID":"c69b657a-1f19-4576-b07d-9ae5983c0d86","Type":"ContainerStarted","Data":"26931842ebacb3643a3f829769722bb25bfb0574fabc1bdbc91d33a7149d52c1"} Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.237435 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" podStartSLOduration=123.237397037 podStartE2EDuration="2m3.237397037s" podCreationTimestamp="2025-12-15 06:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:42.23143087 +0000 UTC m=+147.802573791" 
watchObservedRunningTime="2025-12-15 06:53:42.237397037 +0000 UTC m=+147.808539948" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.264193 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:53:42 crc kubenswrapper[4876]: W1215 06:53:42.270406 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod916c0070_57cd_49dd_bf38_97bfd7bc949f.slice/crio-2cd6b9cc4c195fdf9160ce8c6791cf96186a6fba98b4c07c4e5ada971242d863 WatchSource:0}: Error finding container 2cd6b9cc4c195fdf9160ce8c6791cf96186a6fba98b4c07c4e5ada971242d863: Status 404 returned error can't find the container with id 2cd6b9cc4c195fdf9160ce8c6791cf96186a6fba98b4c07c4e5ada971242d863 Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.291210 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.294777 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.794754514 +0000 UTC m=+148.365897425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.360467 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-t872l" podStartSLOduration=124.360430235 podStartE2EDuration="2m4.360430235s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:42.305483485 +0000 UTC m=+147.876626416" watchObservedRunningTime="2025-12-15 06:53:42.360430235 +0000 UTC m=+147.931573146" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.398400 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.399312 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.899297364 +0000 UTC m=+148.470440275 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.409430 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-p5nck" podStartSLOduration=124.409396527 podStartE2EDuration="2m4.409396527s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:42.38095268 +0000 UTC m=+147.952095591" watchObservedRunningTime="2025-12-15 06:53:42.409396527 +0000 UTC m=+147.980539438" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.433585 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.463573 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:42 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:42 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:42 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.463650 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.499653 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.499951 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:42.999938425 +0000 UTC m=+148.571081336 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.510820 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.603873 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.604694 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.10467831 +0000 UTC m=+148.675821221 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.646239 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5"] Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.646345 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9"] Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.706505 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.706907 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.206859793 +0000 UTC m=+148.778002704 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.707220 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.707721 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.207705986 +0000 UTC m=+148.778848897 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.812096 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.812320 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.312279577 +0000 UTC m=+148.883422488 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.812786 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.818913 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g"] Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.821287 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.321247418 +0000 UTC m=+148.892390329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.842709 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f"] Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.915431 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:42 crc kubenswrapper[4876]: E1215 06:53:42.916039 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.416016224 +0000 UTC m=+148.987159135 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:42 crc kubenswrapper[4876]: I1215 06:53:42.969827 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-x2jnz"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.016491 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.016769 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.516759137 +0000 UTC m=+149.087902048 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.034492 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw"] Dec 15 06:53:43 crc kubenswrapper[4876]: W1215 06:53:43.091876 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod439195f3_d325_42cc_a3cb_094ddf386f33.slice/crio-3c1b336715a1a1fb707143df31bdba5ae2e57598e62809c02f1ecfd1372ec115 WatchSource:0}: Error finding container 3c1b336715a1a1fb707143df31bdba5ae2e57598e62809c02f1ecfd1372ec115: Status 404 returned error can't find the container with id 3c1b336715a1a1fb707143df31bdba5ae2e57598e62809c02f1ecfd1372ec115 Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.119571 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.119863 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.619850527 +0000 UTC m=+149.190993438 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.120461 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.145770 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.170865 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.196016 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cqvrf"] Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.221523 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.721507185 +0000 UTC m=+149.292650096 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.221081 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.299267 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" event={"ID":"a7ef4763-aa60-4f6d-8066-86746ef01427","Type":"ContainerStarted","Data":"01c5fcded95ab1ef3c253121aa5dc15edd609058bd0cd57868fe398e9cd5e424"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.318365 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" event={"ID":"c89ff00c-fdde-4c6d-b849-43061c3c6b2c","Type":"ContainerStarted","Data":"69f08d609f365db5b8e597feb21a052b446df3b2d222d1b7b112c3e6638fc493"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.322458 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q7skf" event={"ID":"e63114fa-711e-4cb6-b222-d0e317e19488","Type":"ContainerStarted","Data":"9140361dab5dba82a2bae58699854526f4ab58f217180d10b38a179418e5e32c"} Dec 15 06:53:43 crc 
kubenswrapper[4876]: I1215 06:53:43.323849 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.327231 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.327660 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.82764289 +0000 UTC m=+149.398785801 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.328118 4876 patch_prober.go:28] interesting pod/console-operator-58897d9998-q7skf container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.328147 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-q7skf" podUID="e63114fa-711e-4cb6-b222-d0e317e19488" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.334814 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" event={"ID":"df271eaa-2f40-4651-9942-5c6e535654d6","Type":"ContainerStarted","Data":"5be7d03bb9ea3495b836f4564d633dc2d3ffe84c283b160d677fb54cfc4f5096"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.336799 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68"] Dec 15 06:53:43 crc kubenswrapper[4876]: W1215 06:53:43.339669 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode63fa2a2_b391_4dc5_a3ef_12eed55a4e99.slice/crio-aedd93e120e88f58bf319b3c4dcee2f2204e8ec902186d4d2a8b46a8232178da WatchSource:0}: Error finding container aedd93e120e88f58bf319b3c4dcee2f2204e8ec902186d4d2a8b46a8232178da: Status 404 returned error can't find the container with id aedd93e120e88f58bf319b3c4dcee2f2204e8ec902186d4d2a8b46a8232178da Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.379552 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-q7skf" podStartSLOduration=125.379528563 podStartE2EDuration="2m5.379528563s" 
podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.36587008 +0000 UTC m=+148.937012991" watchObservedRunningTime="2025-12-15 06:53:43.379528563 +0000 UTC m=+148.950671484" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.380150 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fqp2q"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.383496 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" event={"ID":"66c67ba0-faba-44ed-afc3-6dfcad78a13b","Type":"ContainerStarted","Data":"2d8c2a7c9acefc246f8271cd87d941bd5c5d092a1cb0ed02d2c454bf43895af4"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.384721 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.397060 4876 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hz5lw container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.397162 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" podUID="66c67ba0-faba-44ed-afc3-6dfcad78a13b" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Dec 15 06:53:43 crc kubenswrapper[4876]: W1215 06:53:43.408405 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e1616af_82dc_469c_8ecb_849c3122fe02.slice/crio-b522c0531f20370dc12b88703d57aaaf210d06c3ee61266cef2801e9123d1409 WatchSource:0}: Error finding container b522c0531f20370dc12b88703d57aaaf210d06c3ee61266cef2801e9123d1409: Status 404 returned error can't find the container with id b522c0531f20370dc12b88703d57aaaf210d06c3ee61266cef2801e9123d1409 Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.423789 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" podStartSLOduration=125.423767494 podStartE2EDuration="2m5.423767494s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.423666211 +0000 UTC m=+148.994809112" watchObservedRunningTime="2025-12-15 06:53:43.423767494 +0000 UTC m=+148.994910405" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.432094 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.435132 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["hostpath-provisioner/csi-hostpathplugin-72p8b"] Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.435953 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:43.935927044 +0000 UTC m=+149.507070135 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.464120 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" event={"ID":"62176145-6d99-48fb-866e-2c248f531ae5","Type":"ContainerStarted","Data":"483cb044696170d35f6b8f0bb922c64744fc52c87f6de0a3897b9aed49b40985"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.466387 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.472440 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:43 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:43 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:43 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.472597 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.527042 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qxz8x"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.529783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" event={"ID":"d32f3652-3eaf-46ee-a25c-378d553251a7","Type":"ContainerStarted","Data":"448f8ab5da96066d2ec59d5030107147592df1b85a8c23e3618d8f828b513cea"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.532944 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.534683 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.034658811 +0000 UTC m=+149.605801722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.583885 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" podStartSLOduration=125.58385001 podStartE2EDuration="2m5.58385001s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.551525534 +0000 UTC m=+149.122668445" watchObservedRunningTime="2025-12-15 06:53:43.58385001 +0000 UTC m=+149.154993091" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.599230 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.599497 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.649029 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.649541 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.14952846 +0000 UTC m=+149.720671361 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.677784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" event={"ID":"62cc24e0-8017-4498-b745-b196f50e104f","Type":"ContainerStarted","Data":"1f32510de10aed201ce96278637c038e1d364f10389163fd03c68654313f424c"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.683319 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" podStartSLOduration=125.683301536 podStartE2EDuration="2m5.683301536s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.68125582 +0000 UTC m=+149.252398741" watchObservedRunningTime="2025-12-15 06:53:43.683301536 +0000 UTC m=+149.254444447" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.699708 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj"] Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.703797 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.725768 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" event={"ID":"eb940030-24f8-4f6d-990b-7787f4eacf13","Type":"ContainerStarted","Data":"ad2671e42c72e9f498a33f868136e0d0dd96871ed7dd47d04450f4eea9896932"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.744362 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" podStartSLOduration=125.744292336 podStartE2EDuration="2m5.744292336s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.741608081 +0000 UTC m=+149.312750992" watchObservedRunningTime="2025-12-15 06:53:43.744292336 +0000 UTC m=+149.315435247" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.751882 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" event={"ID":"916c0070-57cd-49dd-bf38-97bfd7bc949f","Type":"ContainerStarted","Data":"9194aa7ff7b124caa1ff1b1b7751d6f2c48fed79664e246f54aa24e58678baa0"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.751945 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" event={"ID":"916c0070-57cd-49dd-bf38-97bfd7bc949f","Type":"ContainerStarted","Data":"2cd6b9cc4c195fdf9160ce8c6791cf96186a6fba98b4c07c4e5ada971242d863"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.752139 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.753344 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.253321719 +0000 UTC m=+149.824464640 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.756632 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" event={"ID":"a18402db-2952-4cfb-bf3a-1f22f23483de","Type":"ContainerStarted","Data":"3a1342fcfc919fad9e0f5e265ff9396764655ffaedf3d03514c1cc1dc78678fb"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.757847 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" event={"ID":"bfe06425-73ac-4753-bdba-7d1f1797b4c6","Type":"ContainerStarted","Data":"facb40b0ef6b099e23b878ba4b9d480025e0197ce4dd9a78108bfcca036cd72f"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.812599 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" event={"ID":"d7464df4-d82d-44d4-9019-04cbc4cd7dc0","Type":"ContainerStarted","Data":"ffff33e5030c2712e10269c7b85574a73999d85a125ca5267012dacd2b4260d3"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.842576 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" event={"ID":"a95b0c4e-e837-4693-96a6-78074ff5278d","Type":"ContainerStarted","Data":"afc0880e4fb6a662a1db8715086c63a7c6fc1dc95f08834410791601fa5b4007"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.855711 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.856749 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.356733557 +0000 UTC m=+149.927876468 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.858128 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gnrsz" event={"ID":"10fa0e8c-4318-4e91-9a17-98b12f8622b3","Type":"ContainerStarted","Data":"2582c333c804a90064b1c75861384ef18176354ad411a886e5e719df1c8cd505"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.884260 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kk5bp" podStartSLOduration=125.884221738 podStartE2EDuration="2m5.884221738s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.881634275 +0000 UTC m=+149.452777186" watchObservedRunningTime="2025-12-15 06:53:43.884221738 +0000 UTC m=+149.455364649" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.894096 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" event={"ID":"8c73655f-0c0a-49a9-83a6-b8e743618809","Type":"ContainerStarted","Data":"a43a709efb8a54e456720b5b23a59d9a948ad64ba316295fbd08aafdb3b8abf4"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.894156 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" event={"ID":"8c73655f-0c0a-49a9-83a6-b8e743618809","Type":"ContainerStarted","Data":"dd03d9bfb25158e5c5a08cce7b81d9cade11e7c4b5c612e310fa40a19744861e"} Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.895167 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.913227 4876 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-c8vw5 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.913298 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" podUID="8c73655f-0c0a-49a9-83a6-b8e743618809" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.934172 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-gnrsz" podStartSLOduration=7.934151526 podStartE2EDuration="7.934151526s" podCreationTimestamp="2025-12-15 06:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.929472645 +0000 UTC m=+149.500615556" 
watchObservedRunningTime="2025-12-15 06:53:43.934151526 +0000 UTC m=+149.505294437" Dec 15 06:53:43 crc kubenswrapper[4876]: I1215 06:53:43.964519 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:43 crc kubenswrapper[4876]: E1215 06:53:43.965369 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.465355181 +0000 UTC m=+150.036498092 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.020413 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" event={"ID":"0e84c5be-bb73-42da-b000-529df9355a16","Type":"ContainerStarted","Data":"9c813d1787e58fd2189507286ed950114c9f4faf2b2b9d10f6aa89a6dfdf9425"} Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.021296 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-95pzl" podStartSLOduration=126.021268157 podStartE2EDuration="2m6.021268157s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:43.984634551 +0000 UTC m=+149.555777462" watchObservedRunningTime="2025-12-15 06:53:44.021268157 +0000 UTC m=+149.592411068" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.044895 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" podStartSLOduration=126.044865629 podStartE2EDuration="2m6.044865629s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:44.02065478 +0000 UTC m=+149.591797681" watchObservedRunningTime="2025-12-15 06:53:44.044865629 +0000 UTC m=+149.616008540" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.065584 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" event={"ID":"c69b657a-1f19-4576-b07d-9ae5983c0d86","Type":"ContainerStarted","Data":"5ddf586364e1dcaba7e78990b51ac5f5af5a6e3a9e6c0286b61691fad1c7aba8"} Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.066717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.066978 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.566968018 +0000 UTC m=+150.138110929 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.076719 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-48ggq" podStartSLOduration=126.076701121 podStartE2EDuration="2m6.076701121s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:44.076533776 +0000 UTC m=+149.647676687" watchObservedRunningTime="2025-12-15 06:53:44.076701121 +0000 UTC m=+149.647844032" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.124773 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l7cgr" podStartSLOduration=126.124751177 podStartE2EDuration="2m6.124751177s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:44.122764642 +0000 UTC m=+149.693907563" watchObservedRunningTime="2025-12-15 06:53:44.124751177 +0000 UTC m=+149.695894098" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.133492 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x2jnz" event={"ID":"439195f3-d325-42cc-a3cb-094ddf386f33","Type":"ContainerStarted","Data":"3c1b336715a1a1fb707143df31bdba5ae2e57598e62809c02f1ecfd1372ec115"} Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.170695 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.172901 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.672878487 +0000 UTC m=+150.244021398 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.204233 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" event={"ID":"69946ceb-62be-4cf4-a350-d5e37ad74eaf","Type":"ContainerStarted","Data":"792306df28cd595360315801df2b8f4d51a9f3053e80ec2dfe29aebd203ccf82"} Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.226437 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vlh5c" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.248640 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-9cvz2" podStartSLOduration=126.248618259 podStartE2EDuration="2m6.248618259s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:44.245000157 +0000 UTC m=+149.816143068" watchObservedRunningTime="2025-12-15 06:53:44.248618259 +0000 UTC m=+149.819761170" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.277932 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.309778 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.809755973 +0000 UTC m=+150.380898884 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.388451 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.388776 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.888760006 +0000 UTC m=+150.459902917 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.441968 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:44 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:44 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:44 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.442353 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.495541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.495964 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:44.99595165 +0000 UTC m=+150.567094561 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.596802 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.597012 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.096973001 +0000 UTC m=+150.668115912 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.597249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.597712 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.097691841 +0000 UTC m=+150.668834752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.698238 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.699178 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.199152095 +0000 UTC m=+150.770295006 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.807679 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.808176 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.308159689 +0000 UTC m=+150.879302600 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:44 crc kubenswrapper[4876]: I1215 06:53:44.912675 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:44 crc kubenswrapper[4876]: E1215 06:53:44.913385 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.413360238 +0000 UTC m=+150.984503149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.017698 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.018792 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.51874229 +0000 UTC m=+151.089885371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.119641 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.120229 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.620206913 +0000 UTC m=+151.191349824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.221014 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.221433 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.72141704 +0000 UTC m=+151.292559951 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.240609 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" event={"ID":"a7ef4763-aa60-4f6d-8066-86746ef01427","Type":"ContainerStarted","Data":"b561c282f46c61c25ecb8afd13fa8bba20be8cb937c772a04393a6d07b861f5e"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.242176 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.246453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" event={"ID":"c89ff00c-fdde-4c6d-b849-43061c3c6b2c","Type":"ContainerStarted","Data":"981cc7d5e13bb30b9b6b9c7342e0f0740c2c37d8a44d01f8879eefdd30a62a20"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.246505 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" event={"ID":"c89ff00c-fdde-4c6d-b849-43061c3c6b2c","Type":"ContainerStarted","Data":"77cccc0c450fc131cc29880079b5d3c4725c0d68348f67dbb8e7abb7835003fb"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.247358 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.256793 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" event={"ID":"412024f1-f06e-4af5-ab14-d2bf11d7c6d3","Type":"ContainerStarted","Data":"5710a8d0736334da5d17dc2a07274b9f0d661b44575874ffaeb6426855b140a0"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.256841 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" event={"ID":"412024f1-f06e-4af5-ab14-d2bf11d7c6d3","Type":"ContainerStarted","Data":"415450e9d6afe2a12919eb28694eac825012c7de71496c11ce54c94e6ce40331"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.256852 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" event={"ID":"412024f1-f06e-4af5-ab14-d2bf11d7c6d3","Type":"ContainerStarted","Data":"391ba7e46c7e0640fe92efbe00ac4549751af9e1ad9ea004d10f7c5b55f6945f"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.260998 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.282661 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerStarted","Data":"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa"} Dec 15 06:53:45 crc 
kubenswrapper[4876]: I1215 06:53:45.282737 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerStarted","Data":"868dc7fe2d7b8f1233adb9729e4cad34c3ff05441848e75212df2372377678cd"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.283835 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.289532 4876 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kvwdp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.289606 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.297812 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"cd5135730832d6aa5a47271291947b1582aad77bd1f46faf27c01f48ba8d73db"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.297865 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4902223282ea6f79a311468b188c4286a6ab6a30edbad2b5b7dbcb8abf4c1e3d"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.328251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.330056 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.830028833 +0000 UTC m=+151.401171744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.379918 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" event={"ID":"916c0070-57cd-49dd-bf38-97bfd7bc949f","Type":"ContainerStarted","Data":"6736defec76b8e121f2e96466bc3594c1b0a240ab11054713b948d89922569af"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.416265 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"a8d6769fea8aeb6310325d94c9b7156d5946d273bd11c022d6e9bc655024a387"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.416313 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f23cb89623608f6cfdad6c6fbd16dc065d5342e278c6dc4270fcfe9b54cb9f86"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.426604 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" event={"ID":"d7464df4-d82d-44d4-9019-04cbc4cd7dc0","Type":"ContainerStarted","Data":"27f4bc4e62dd490d56881e236dfefa2533a43b083ac101bc4b3ec02986df6e12"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.426652 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" event={"ID":"d7464df4-d82d-44d4-9019-04cbc4cd7dc0","Type":"ContainerStarted","Data":"c325da8c5dab2d17c199e3ca5f4379af7e1d648ef1034e13a7019877ae8486f1"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.430236 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.430532 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:45.93052037 +0000 UTC m=+151.501663281 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.438821 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" event={"ID":"6bfe1983-046b-4e1f-a36a-c73834069529","Type":"ContainerStarted","Data":"3800ddefcfc4a81681568250de07e2f6a815b5fb862f80707a079abf04e8d8d4"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.438891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" event={"ID":"6bfe1983-046b-4e1f-a36a-c73834069529","Type":"ContainerStarted","Data":"e11f320b78b61a6a15e32af1666ffb2cc1a71d19527eae6659fe4cc9194529f1"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.443453 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:45 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:45 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:45 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.443523 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.463179 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qxz8x" event={"ID":"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc","Type":"ContainerStarted","Data":"f4b02d7dfbde9af52a544b8f09f4100f94fc1e9631b11aa9203cefe1014eef44"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.480274 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" event={"ID":"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99","Type":"ContainerStarted","Data":"b94186fecd242174d9c84f845f7c7acef0934552a3a166a24315874b4a1420ba"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.480348 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" event={"ID":"e63fa2a2-b391-4dc5-a3ef-12eed55a4e99","Type":"ContainerStarted","Data":"aedd93e120e88f58bf319b3c4dcee2f2204e8ec902186d4d2a8b46a8232178da"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.506047 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" event={"ID":"eb940030-24f8-4f6d-990b-7787f4eacf13","Type":"ContainerStarted","Data":"fd3d0902548f62bf31d7de1b8021c494dd3ea5b0df6e106e090ab09554289a40"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.525061 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" 
event={"ID":"d32f3652-3eaf-46ee-a25c-378d553251a7","Type":"ContainerStarted","Data":"be7e870c0ae647bdbec25363ba08596c8ff15407c4c2f1d28065fea9533b9527"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.531562 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.533048 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.033018642 +0000 UTC m=+151.604161553 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.545390 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-x2jnz" event={"ID":"439195f3-d325-42cc-a3cb-094ddf386f33","Type":"ContainerStarted","Data":"1ce31198f5385df7e190b0abf998da6d41fdee2cf6c0f84345c5c253b98e9bb8"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.581992 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" event={"ID":"7e1616af-82dc-469c-8ecb-849c3122fe02","Type":"ContainerStarted","Data":"b522c0531f20370dc12b88703d57aaaf210d06c3ee61266cef2801e9123d1409"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.640424 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-8nkpf" event={"ID":"62cc24e0-8017-4498-b745-b196f50e104f","Type":"ContainerStarted","Data":"4cafd5c422c22fc36b8c4d0b7778f5b7714befcf1e65a57d74b391dd6bcb70c4"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.645042 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.648528 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.148510459 +0000 UTC m=+151.719653370 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.696309 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" event={"ID":"68787888-eea8-4079-89a0-6718cabcd14b","Type":"ContainerStarted","Data":"a2e2c93ac68c294c31b9c0427fddaea5f848aa18fcbc74aa15cb69aa5053dfb9"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.696359 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" event={"ID":"68787888-eea8-4079-89a0-6718cabcd14b","Type":"ContainerStarted","Data":"d5680a25b4e1adc7359ccc545ea4588f632075b4bfa00387ab10b7d57353160f"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.737234 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cqvrf" event={"ID":"edee3ad2-c1e0-4331-a30b-71e541f430af","Type":"ContainerStarted","Data":"0742c05b6b1f10d7de3641a288655a2e71cd7e203e0c1418c7d50989fe754286"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.737322 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cqvrf" event={"ID":"edee3ad2-c1e0-4331-a30b-71e541f430af","Type":"ContainerStarted","Data":"821e7469858883fab35bb6b5a4e0611b11708f4c412de040b0c5905b6276912b"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.738763 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.743809 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" event={"ID":"a18402db-2952-4cfb-bf3a-1f22f23483de","Type":"ContainerStarted","Data":"13ab1b720f48f482f28ffe5b84e4d206b1b4b8c2aa88dbac238991024ac0a2d5"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.745611 4876 patch_prober.go:28] interesting pod/downloads-7954f5f757-cqvrf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.745654 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cqvrf" podUID="edee3ad2-c1e0-4331-a30b-71e541f430af" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.754092 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.755744 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.255722853 +0000 UTC m=+151.826865754 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.767530 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" podStartSLOduration=127.767503134 podStartE2EDuration="2m7.767503134s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:45.765506507 +0000 UTC m=+151.336649418" watchObservedRunningTime="2025-12-15 06:53:45.767503134 +0000 UTC m=+151.338646055" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.770570 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"14a5108980a713e1c51fa08db14cdddd00ed1ec9bdf39c80f42efb76805ae50c"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.770648 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3ed40c79172d6c12ad8bf090923c0cdd844be90596e76006a3b12c116808bb72"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.771474 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.791036 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-x2jnz" podStartSLOduration=9.791005851 podStartE2EDuration="9.791005851s" podCreationTimestamp="2025-12-15 06:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:45.790974861 +0000 UTC m=+151.362117772" watchObservedRunningTime="2025-12-15 06:53:45.791005851 +0000 UTC m=+151.362148762" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.823242 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" event={"ID":"239647d8-4ba9-475a-872b-2b308b1440da","Type":"ContainerStarted","Data":"07f0df5629e33a923483f718a043e2d16d32daadc5da7074dfaea21e733e236a"} Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.840901 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hz5lw" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.856290 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-c8vw5" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 
06:53:45.866248 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-f28h8" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.874402 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.892128 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.392084015 +0000 UTC m=+151.963226926 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.949333 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-lzgq9" podStartSLOduration=127.949304548 podStartE2EDuration="2m7.949304548s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:45.865404457 +0000 UTC m=+151.436547388" watchObservedRunningTime="2025-12-15 06:53:45.949304548 +0000 UTC m=+151.520447469" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.950604 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9gltw" podStartSLOduration=127.950596764 podStartE2EDuration="2m7.950596764s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:45.948943028 +0000 UTC m=+151.520085949" watchObservedRunningTime="2025-12-15 06:53:45.950596764 +0000 UTC m=+151.521739675" Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.986677 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:45 crc kubenswrapper[4876]: E1215 06:53:45.988432 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.488415334 +0000 UTC m=+152.059558245 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:45 crc kubenswrapper[4876]: I1215 06:53:45.993818 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-znkl5" podStartSLOduration=127.993794624 podStartE2EDuration="2m7.993794624s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:45.993054764 +0000 UTC m=+151.564197665" watchObservedRunningTime="2025-12-15 06:53:45.993794624 +0000 UTC m=+151.564937535" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.069510 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" podStartSLOduration=128.069469195 podStartE2EDuration="2m8.069469195s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.054932548 +0000 UTC m=+151.626075459" watchObservedRunningTime="2025-12-15 06:53:46.069469195 +0000 UTC m=+151.640612306" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.099379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.099723 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.599708923 +0000 UTC m=+152.170851834 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.138695 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fqp2q" podStartSLOduration=127.138678965 podStartE2EDuration="2m7.138678965s" podCreationTimestamp="2025-12-15 06:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.138386867 +0000 UTC m=+151.709529788" watchObservedRunningTime="2025-12-15 06:53:46.138678965 +0000 UTC m=+151.709821876" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.204817 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.205147 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.705127827 +0000 UTC m=+152.276270738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.246254 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-k8hdp" podStartSLOduration=128.24623918 podStartE2EDuration="2m8.24623918s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.244934123 +0000 UTC m=+151.816077034" watchObservedRunningTime="2025-12-15 06:53:46.24623918 +0000 UTC m=+151.817382091" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.248235 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" podStartSLOduration=128.248226275 podStartE2EDuration="2m8.248226275s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.214470459 +0000 UTC m=+151.785613370" watchObservedRunningTime="2025-12-15 06:53:46.248226275 +0000 UTC m=+151.819369176" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.299160 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" podStartSLOduration=128.299136751 podStartE2EDuration="2m8.299136751s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.295735466 +0000 UTC m=+151.866878387" watchObservedRunningTime="2025-12-15 06:53:46.299136751 +0000 UTC m=+151.870279662" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.306888 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.307216 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.807203878 +0000 UTC m=+152.378346789 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.372736 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-xlh8f" podStartSLOduration=127.372721104 podStartE2EDuration="2m7.372721104s" podCreationTimestamp="2025-12-15 06:51:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.342431105 +0000 UTC m=+151.913574026" watchObservedRunningTime="2025-12-15 06:53:46.372721104 +0000 UTC m=+151.943864015" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.373482 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-h78rt" podStartSLOduration=128.373476354 podStartE2EDuration="2m8.373476354s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.370437779 +0000 UTC m=+151.941580690" watchObservedRunningTime="2025-12-15 06:53:46.373476354 +0000 UTC m=+151.944619265" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.407971 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.408214 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.908181817 +0000 UTC m=+152.479324728 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.408464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.408949 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:46.908931529 +0000 UTC m=+152.480074450 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.437981 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:46 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:46 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:46 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.438054 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.441158 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-g6q46" podStartSLOduration=128.441144471 podStartE2EDuration="2m8.441144471s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.418513557 +0000 UTC m=+151.989656488" watchObservedRunningTime="2025-12-15 06:53:46.441144471 +0000 UTC m=+152.012287382" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.485224 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-cqvrf" podStartSLOduration=128.485208386 podStartE2EDuration="2m8.485208386s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-15 06:53:46.481754659 +0000 UTC m=+152.052897560" watchObservedRunningTime="2025-12-15 06:53:46.485208386 +0000 UTC m=+152.056351297" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.509690 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.510207 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.010186906 +0000 UTC m=+152.581329817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.510664 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-q7skf" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.532544 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" podStartSLOduration=128.532513542 podStartE2EDuration="2m8.532513542s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.528667904 +0000 UTC m=+152.099810825" watchObservedRunningTime="2025-12-15 06:53:46.532513542 +0000 UTC m=+152.103656453" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.611362 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.611892 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.111872316 +0000 UTC m=+152.683015227 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.722793 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.724418 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.224382328 +0000 UTC m=+152.795525229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.724618 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.725013 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.225002276 +0000 UTC m=+152.796145187 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.827984 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.828202 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.328160527 +0000 UTC m=+152.899303438 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.828578 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.829048 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.329031432 +0000 UTC m=+152.900174343 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.880800 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-w2r68" event={"ID":"7e1616af-82dc-469c-8ecb-849c3122fe02","Type":"ContainerStarted","Data":"9d5170d3622ef6a1bc07a1fab48d71c869122750b23287d572f56a88788b1e50"} Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.919828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qxz8x" event={"ID":"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc","Type":"ContainerStarted","Data":"65789bbef463723b40be4f470f9d5e38ee652b650c888c3501963ab387e50f51"} Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.919909 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qxz8x" event={"ID":"e12c07fa-c5ff-4a5b-9188-1a1fbb0d9ccc","Type":"ContainerStarted","Data":"e666232d683f2d0302d052d67eeb3552eef86130c33a1656373c7bc681422ac4"} Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.919963 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.930982 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-8p2gj" event={"ID":"68787888-eea8-4079-89a0-6718cabcd14b","Type":"ContainerStarted","Data":"1b65331e383ad36f1a522121f44f99183540903bf33415eb920a6ae0198be1c9"} Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.933436 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" event={"ID":"239647d8-4ba9-475a-872b-2b308b1440da","Type":"ContainerStarted","Data":"bb77d3e0b70478142e71cfe6f8feb611d28fa303116c6b3c7c976eee1230b48e"} Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.933899 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:46 crc kubenswrapper[4876]: E1215 06:53:46.935620 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.435596978 +0000 UTC m=+153.006739889 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.943341 4876 patch_prober.go:28] interesting pod/downloads-7954f5f757-cqvrf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.943407 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cqvrf" podUID="edee3ad2-c1e0-4331-a30b-71e541f430af" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.946679 4876 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kvwdp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.946751 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 15 06:53:46 crc kubenswrapper[4876]: I1215 06:53:46.954237 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.010437 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-qxz8x" podStartSLOduration=11.010417344 podStartE2EDuration="11.010417344s" podCreationTimestamp="2025-12-15 06:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:46.971146704 +0000 UTC m=+152.542289635" watchObservedRunningTime="2025-12-15 06:53:47.010417344 +0000 UTC m=+152.581560255" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.037036 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.037467 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.537450792 +0000 UTC m=+153.108593703 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.138174 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.138465 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.638427022 +0000 UTC m=+153.209569933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.138670 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.139062 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.639051889 +0000 UTC m=+153.210194800 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.237352 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.238736 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.239732 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.240220 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.740199223 +0000 UTC m=+153.311342134 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.251382 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.254314 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.341754 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.341796 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p9kz\" (UniqueName: \"kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.341858 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.341883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.342182 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.842170321 +0000 UTC m=+153.413313232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.419473 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.421158 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.426164 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443092 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443189 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.443450 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.943423509 +0000 UTC m=+153.514566420 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443505 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443547 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443576 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443622 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443642 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443668 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p9kz\" (UniqueName: \"kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443678 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:47 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:47 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:47 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443699 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8sln\" (UniqueName: \"kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.443719 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.444078 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:47.944063176 +0000 UTC m=+153.515206087 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.445274 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.445424 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.495584 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p9kz\" (UniqueName: \"kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz\") pod \"community-operators-kf2qs\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.544357 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.544541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8sln\" (UniqueName: \"kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.544652 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.044609124 +0000 UTC m=+153.615752035 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.544826 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.544882 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.544977 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.545072 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.045058957 +0000 UTC m=+153.616201868 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.545490 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.545534 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.571759 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8sln\" (UniqueName: \"kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln\") pod \"certified-operators-9n2p9\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.588087 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.619047 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.620329 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.638372 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.647411 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.647833 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.647875 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.647899 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg5hr\" (UniqueName: \"kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.648013 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.147996372 +0000 UTC m=+153.719139283 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.751650 4876 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.752274 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.752379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.752426 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.752457 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg5hr\" (UniqueName: \"kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.757704 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.758577 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.25855471 +0000 UTC m=+153.829697621 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.759010 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.773443 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.784193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg5hr\" (UniqueName: \"kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr\") pod \"community-operators-p7mnt\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.839225 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.840528 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.855992 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.856850 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.356815694 +0000 UTC m=+153.927958605 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.857549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zds85\" (UniqueName: \"kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.857608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.857681 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.857710 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.858182 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.358162421 +0000 UTC m=+153.929305332 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-7k6p8" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.867823 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.877429 4876 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-15T06:53:47.751692448Z","Handler":null,"Name":""} Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.958625 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.959021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zds85\" (UniqueName: \"kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.959088 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: E1215 06:53:47.959670 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-15 06:53:48.459629744 +0000 UTC m=+154.030772655 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.960284 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.960902 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.961279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.965856 4876 generic.go:334] "Generic (PLEG): container finished" podID="df271eaa-2f40-4651-9942-5c6e535654d6" containerID="5be7d03bb9ea3495b836f4564d633dc2d3ffe84c283b160d677fb54cfc4f5096" exitCode=0 Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.966011 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" event={"ID":"df271eaa-2f40-4651-9942-5c6e535654d6","Type":"ContainerDied","Data":"5be7d03bb9ea3495b836f4564d633dc2d3ffe84c283b160d677fb54cfc4f5096"} Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.966690 4876 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.966743 4876 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 15 06:53:47 crc kubenswrapper[4876]: I1215 06:53:47.974526 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:47.996251 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zds85\" (UniqueName: \"kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85\") pod \"certified-operators-nbhbg\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:47.998652 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" event={"ID":"239647d8-4ba9-475a-872b-2b308b1440da","Type":"ContainerStarted","Data":"cdc825102bfd155dd9cca4360e0f4045694633e0757a03c792ade922aca2ec3f"} Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:47.998690 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" event={"ID":"239647d8-4ba9-475a-872b-2b308b1440da","Type":"ContainerStarted","Data":"bcc9d6ae0e2cd9928aeacf41783830b29d6dd68402be1f8c46b3e3de1a75f7ea"} Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.000176 4876 patch_prober.go:28] interesting pod/downloads-7954f5f757-cqvrf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.000212 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cqvrf" podUID="edee3ad2-c1e0-4331-a30b-71e541f430af" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.001276 4876 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kvwdp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.001345 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.33:8080/healthz\": dial tcp 10.217.0.33:8080: connect: connection refused" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.063202 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.075896 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.075948 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.111996 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.160151 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-7k6p8\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.169005 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.181933 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.189903 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.195349 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.253337 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.432660 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:48 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:48 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:48 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.433066 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.537505 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.579577 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.636679 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.637347 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.641363 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.641426 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.673650 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.689179 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.689228 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.698670 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.698868 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.708351 4876 patch_prober.go:28] interesting pod/console-f9d7485db-x6r8c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.708417 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x6r8c" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.725806 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.726505 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.802712 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.802863 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.805442 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.836768 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.935576 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.935649 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.944177 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:48 crc kubenswrapper[4876]: I1215 06:53:48.997894 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.005125 4876 generic.go:334] "Generic (PLEG): container finished" podID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerID="e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897" exitCode=0 Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.005322 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerDied","Data":"e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.005393 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerStarted","Data":"3b3720638400a533a597891977fc5f6f2c8c2d234fd65a298eeb476043aea48e"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.006856 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.009741 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" event={"ID":"b3df8165-868f-49d9-a6ab-23aa9ce6e544","Type":"ContainerStarted","Data":"458ca23ba0d0f2c465dea7abbf94c15e9eeab6fec37c393a115b02e5b0f3f3b5"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.009780 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" event={"ID":"b3df8165-868f-49d9-a6ab-23aa9ce6e544","Type":"ContainerStarted","Data":"5209888b39bdd93b01b6dc40a0aa9c67a9e1822c867d59ea4f32b45bacd390f9"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.010327 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.012714 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dd38562-2323-4992-a015-7ba42406c1b5" containerID="55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee" exitCode=0 Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.012761 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerDied","Data":"55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.012776 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerStarted","Data":"d505159b0db7e1a4f3ab46c559c6cfefed7f6eb19f9749b98b061f099bc83a06"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.018589 4876 generic.go:334] "Generic (PLEG): container finished" podID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerID="298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1" exitCode=0 Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.018693 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerDied","Data":"298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.018718 
4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerStarted","Data":"0f4ab9b1a13261b61abef46c677b6ab6a88689dc1d8778bf0e9d97f3cce535ba"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.031473 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" event={"ID":"239647d8-4ba9-475a-872b-2b308b1440da","Type":"ContainerStarted","Data":"18eede5d9b55963e539161daa690196aa85c01fca7370f77db856984a9706383"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.034916 4876 generic.go:334] "Generic (PLEG): container finished" podID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerID="fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855" exitCode=0 Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.036686 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerDied","Data":"fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.036737 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerStarted","Data":"c5ca9de09bfa7a75f443af31f924554eb329bfe5b932616cbdb1063749e633bd"} Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.043550 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-kqs9w" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.060826 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" podStartSLOduration=131.060808494 podStartE2EDuration="2m11.060808494s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:49.05959589 +0000 UTC m=+154.630738801" watchObservedRunningTime="2025-12-15 06:53:49.060808494 +0000 UTC m=+154.631951405" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.222957 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.224162 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.228678 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.314230 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-72p8b" podStartSLOduration=13.314211055 podStartE2EDuration="13.314211055s" podCreationTimestamp="2025-12-15 06:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:49.262447644 +0000 UTC m=+154.833590555" watchObservedRunningTime="2025-12-15 06:53:49.314211055 +0000 UTC m=+154.885353966" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.315762 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.315878 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.315926 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmqss\" (UniqueName: \"kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.316135 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.417557 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.417899 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmqss\" (UniqueName: \"kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.418001 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.418597 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.418628 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.428727 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.434349 4876 patch_prober.go:28] interesting pod/router-default-5444994796-p5nck container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 15 06:53:49 crc kubenswrapper[4876]: [-]has-synced failed: reason withheld Dec 15 06:53:49 crc kubenswrapper[4876]: [+]process-running ok Dec 15 06:53:49 crc kubenswrapper[4876]: healthz check failed Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.434432 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-p5nck" podUID="631b168a-5a6c-4c8e-9854-52ab4c74d9a3" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.437092 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmqss\" (UniqueName: \"kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss\") pod \"redhat-marketplace-2fgng\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.543912 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.596368 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.621312 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:53:49 crc kubenswrapper[4876]: E1215 06:53:49.621784 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df271eaa-2f40-4651-9942-5c6e535654d6" containerName="collect-profiles" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.621804 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="df271eaa-2f40-4651-9942-5c6e535654d6" containerName="collect-profiles" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.621993 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="df271eaa-2f40-4651-9942-5c6e535654d6" containerName="collect-profiles" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.623164 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.664301 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.676428 4876 patch_prober.go:28] interesting pod/downloads-7954f5f757-cqvrf container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.676508 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cqvrf" podUID="edee3ad2-c1e0-4331-a30b-71e541f430af" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.678465 4876 patch_prober.go:28] interesting pod/downloads-7954f5f757-cqvrf container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.678529 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-cqvrf" podUID="edee3ad2-c1e0-4331-a30b-71e541f430af" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.725434 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume\") pod \"df271eaa-2f40-4651-9942-5c6e535654d6\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.726269 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvb48\" (UniqueName: \"kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48\") pod \"df271eaa-2f40-4651-9942-5c6e535654d6\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.726701 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume\") pod \"df271eaa-2f40-4651-9942-5c6e535654d6\" (UID: \"df271eaa-2f40-4651-9942-5c6e535654d6\") " Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.727849 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fr89\" (UniqueName: \"kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.727916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 
06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.727965 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.728894 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume" (OuterVolumeSpecName: "config-volume") pod "df271eaa-2f40-4651-9942-5c6e535654d6" (UID: "df271eaa-2f40-4651-9942-5c6e535654d6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.732358 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "df271eaa-2f40-4651-9942-5c6e535654d6" (UID: "df271eaa-2f40-4651-9942-5c6e535654d6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.752869 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48" (OuterVolumeSpecName: "kube-api-access-bvb48") pod "df271eaa-2f40-4651-9942-5c6e535654d6" (UID: "df271eaa-2f40-4651-9942-5c6e535654d6"). InnerVolumeSpecName "kube-api-access-bvb48". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.776726 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.780366 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:53:49 crc kubenswrapper[4876]: W1215 06:53:49.800976 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd634c545_8f87_4051_977d_640669ebdc07.slice/crio-dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8 WatchSource:0}: Error finding container dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8: Status 404 returned error can't find the container with id dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8 Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.803441 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.828717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.828873 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fr89\" (UniqueName: \"kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " 
pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.828947 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.829030 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvb48\" (UniqueName: \"kubernetes.io/projected/df271eaa-2f40-4651-9942-5c6e535654d6-kube-api-access-bvb48\") on node \"crc\" DevicePath \"\"" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.829047 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df271eaa-2f40-4651-9942-5c6e535654d6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.829060 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/df271eaa-2f40-4651-9942-5c6e535654d6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.829235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.829536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.844235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fr89\" (UniqueName: \"kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89\") pod \"redhat-marketplace-c69j4\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:49 crc kubenswrapper[4876]: I1215 06:53:49.959282 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.064533 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d634c545-8f87-4051-977d-640669ebdc07","Type":"ContainerStarted","Data":"dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8"} Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.070586 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerStarted","Data":"5de2b97a8f5bf47d322d07671df9d5675257dda084611ebe4e6505350b92c08a"} Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.075980 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" event={"ID":"df271eaa-2f40-4651-9942-5c6e535654d6","Type":"ContainerDied","Data":"3f5a29d4a02007907f41536500e888fba3ff524f1c00a798df119bc7f88ef29b"} Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.076019 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f5a29d4a02007907f41536500e888fba3ff524f1c00a798df119bc7f88ef29b" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.076158 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.331082 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.441467 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.442721 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.445185 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.449942 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.457270 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-p5nck" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.461669 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.551267 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.551420 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb5r7\" (UniqueName: \"kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.551501 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.654955 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.654994 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb5r7\" (UniqueName: \"kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.655042 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.655550 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " 
pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.657133 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.696845 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb5r7\" (UniqueName: \"kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7\") pod \"redhat-operators-4bwjm\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.811340 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.812651 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.826617 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.827574 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.959354 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.959876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x84vd\" (UniqueName: \"kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:50 crc kubenswrapper[4876]: I1215 06:53:50.959919 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.062245 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.062352 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x84vd\" (UniqueName: \"kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " 
pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.062397 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.062940 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.084754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.098008 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x84vd\" (UniqueName: \"kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd\") pod \"redhat-operators-bgm7t\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.136418 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.138548 4876 generic.go:334] "Generic (PLEG): container finished" podID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerID="c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde" exitCode=0 Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.138727 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerDied","Data":"c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde"} Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.166743 4876 generic.go:334] "Generic (PLEG): container finished" podID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerID="56fc1a2c5bc37981ae7f1b42da098c8b3176211032904bf781a9a8cf417ec047" exitCode=0 Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.166868 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerDied","Data":"56fc1a2c5bc37981ae7f1b42da098c8b3176211032904bf781a9a8cf417ec047"} Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.166934 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerStarted","Data":"86f9b53b25b41428acc746415c272391d6abf347b86cfb587387a5213cbfa581"} Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.174937 4876 generic.go:334] "Generic (PLEG): container finished" podID="d634c545-8f87-4051-977d-640669ebdc07" containerID="1d0b05caae318db1e641a43acfd1a5c7e27ef363810866fb974b41e054be8dd2" exitCode=0 Dec 15 06:53:51 crc kubenswrapper[4876]: 
I1215 06:53:51.175427 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d634c545-8f87-4051-977d-640669ebdc07","Type":"ContainerDied","Data":"1d0b05caae318db1e641a43acfd1a5c7e27ef363810866fb974b41e054be8dd2"} Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.402173 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:53:51 crc kubenswrapper[4876]: I1215 06:53:51.648889 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:53:51 crc kubenswrapper[4876]: W1215 06:53:51.703022 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a81db39_a0c0_4be6_8e60_73fdf457b8b9.slice/crio-1fc3b249ded6d4c9284351de17415c3608a189c6508d34da9de5f21ffe54b5ca WatchSource:0}: Error finding container 1fc3b249ded6d4c9284351de17415c3608a189c6508d34da9de5f21ffe54b5ca: Status 404 returned error can't find the container with id 1fc3b249ded6d4c9284351de17415c3608a189c6508d34da9de5f21ffe54b5ca Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.251042 4876 generic.go:334] "Generic (PLEG): container finished" podID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerID="4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf" exitCode=0 Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.251185 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerDied","Data":"4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf"} Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.251659 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerStarted","Data":"30595d45ec5818462cd940617891d5fa86e9422f8536bc0c3f2e2ebebe7bc678"} Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.263565 4876 generic.go:334] "Generic (PLEG): container finished" podID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerID="fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30" exitCode=0 Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.265624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerDied","Data":"fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30"} Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.265717 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerStarted","Data":"1fc3b249ded6d4c9284351de17415c3608a189c6508d34da9de5f21ffe54b5ca"} Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.608223 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.709889 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access\") pod \"d634c545-8f87-4051-977d-640669ebdc07\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.710563 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir\") pod \"d634c545-8f87-4051-977d-640669ebdc07\" (UID: \"d634c545-8f87-4051-977d-640669ebdc07\") " Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.710692 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d634c545-8f87-4051-977d-640669ebdc07" (UID: "d634c545-8f87-4051-977d-640669ebdc07"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.711955 4876 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d634c545-8f87-4051-977d-640669ebdc07-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.730987 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d634c545-8f87-4051-977d-640669ebdc07" (UID: "d634c545-8f87-4051-977d-640669ebdc07"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:53:52 crc kubenswrapper[4876]: I1215 06:53:52.813407 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d634c545-8f87-4051-977d-640669ebdc07-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:53:53 crc kubenswrapper[4876]: I1215 06:53:53.280636 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"d634c545-8f87-4051-977d-640669ebdc07","Type":"ContainerDied","Data":"dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8"} Dec 15 06:53:53 crc kubenswrapper[4876]: I1215 06:53:53.280680 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbb70fda4253481a331d8027c6b36b8175cd767a323dd693de11e9acb894c2f8" Dec 15 06:53:53 crc kubenswrapper[4876]: I1215 06:53:53.280761 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.896602 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 15 06:53:54 crc kubenswrapper[4876]: E1215 06:53:54.896854 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d634c545-8f87-4051-977d-640669ebdc07" containerName="pruner" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.896865 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d634c545-8f87-4051-977d-640669ebdc07" containerName="pruner" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.896966 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d634c545-8f87-4051-977d-640669ebdc07" containerName="pruner" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.897364 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.909495 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.914652 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 15 06:53:54 crc kubenswrapper[4876]: I1215 06:53:54.914867 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.050754 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.050818 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.153952 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.154031 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.157236 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc 
kubenswrapper[4876]: I1215 06:53:55.179564 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.238688 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:53:55 crc kubenswrapper[4876]: I1215 06:53:55.609555 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 15 06:53:55 crc kubenswrapper[4876]: W1215 06:53:55.650875 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf9a1fbb6_f240_4b99_b055_083818bf54f3.slice/crio-44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e WatchSource:0}: Error finding container 44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e: Status 404 returned error can't find the container with id 44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e Dec 15 06:53:56 crc kubenswrapper[4876]: I1215 06:53:56.358799 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f9a1fbb6-f240-4b99-b055-083818bf54f3","Type":"ContainerStarted","Data":"44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e"} Dec 15 06:53:57 crc kubenswrapper[4876]: I1215 06:53:57.322995 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:53:57 crc kubenswrapper[4876]: I1215 06:53:57.323058 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:53:57 crc kubenswrapper[4876]: I1215 06:53:57.428783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f9a1fbb6-f240-4b99-b055-083818bf54f3","Type":"ContainerStarted","Data":"96aafbb10ec2c5758e82240b17f9b67bec7a2fcd969a87cbf93ebc09e101e103"} Dec 15 06:53:57 crc kubenswrapper[4876]: I1215 06:53:57.456956 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.456921762 podStartE2EDuration="3.456921762s" podCreationTimestamp="2025-12-15 06:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:53:57.455580845 +0000 UTC m=+163.026723776" watchObservedRunningTime="2025-12-15 06:53:57.456921762 +0000 UTC m=+163.028064673" Dec 15 06:53:57 crc kubenswrapper[4876]: I1215 06:53:57.932864 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-qxz8x" Dec 15 06:53:58 crc kubenswrapper[4876]: I1215 06:53:58.452255 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9a1fbb6-f240-4b99-b055-083818bf54f3" 
containerID="96aafbb10ec2c5758e82240b17f9b67bec7a2fcd969a87cbf93ebc09e101e103" exitCode=0 Dec 15 06:53:58 crc kubenswrapper[4876]: I1215 06:53:58.452428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f9a1fbb6-f240-4b99-b055-083818bf54f3","Type":"ContainerDied","Data":"96aafbb10ec2c5758e82240b17f9b67bec7a2fcd969a87cbf93ebc09e101e103"} Dec 15 06:53:58 crc kubenswrapper[4876]: I1215 06:53:58.660688 4876 patch_prober.go:28] interesting pod/console-f9d7485db-x6r8c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Dec 15 06:53:58 crc kubenswrapper[4876]: I1215 06:53:58.662239 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-x6r8c" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" probeResult="failure" output="Get \"https://10.217.0.9:8443/health\": dial tcp 10.217.0.9:8443: connect: connection refused" Dec 15 06:53:59 crc kubenswrapper[4876]: I1215 06:53:59.706385 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-cqvrf" Dec 15 06:54:01 crc kubenswrapper[4876]: I1215 06:54:01.104264 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:54:01 crc kubenswrapper[4876]: I1215 06:54:01.113505 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/daee20e1-a017-4464-9626-ea2c52cfae57-metrics-certs\") pod \"network-metrics-daemon-rzth5\" (UID: \"daee20e1-a017-4464-9626-ea2c52cfae57\") " pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:54:01 crc kubenswrapper[4876]: I1215 06:54:01.230529 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rzth5" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.079298 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.114018 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access\") pod \"f9a1fbb6-f240-4b99-b055-083818bf54f3\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.114211 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir\") pod \"f9a1fbb6-f240-4b99-b055-083818bf54f3\" (UID: \"f9a1fbb6-f240-4b99-b055-083818bf54f3\") " Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.114521 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f9a1fbb6-f240-4b99-b055-083818bf54f3" (UID: "f9a1fbb6-f240-4b99-b055-083818bf54f3"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.119554 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f9a1fbb6-f240-4b99-b055-083818bf54f3" (UID: "f9a1fbb6-f240-4b99-b055-083818bf54f3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.216846 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9a1fbb6-f240-4b99-b055-083818bf54f3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.216898 4876 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f9a1fbb6-f240-4b99-b055-083818bf54f3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.591683 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f9a1fbb6-f240-4b99-b055-083818bf54f3","Type":"ContainerDied","Data":"44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e"} Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.591758 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44f8f79b2ce42868c34c87913e54e58d9e766cd6148db519e352415104554d6e" Dec 15 06:54:07 crc kubenswrapper[4876]: I1215 06:54:07.591799 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 15 06:54:08 crc kubenswrapper[4876]: I1215 06:54:08.206647 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:54:08 crc kubenswrapper[4876]: I1215 06:54:08.667372 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:54:08 crc kubenswrapper[4876]: I1215 06:54:08.673810 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 06:54:19 crc kubenswrapper[4876]: I1215 06:54:19.586879 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x277g" Dec 15 06:54:20 crc kubenswrapper[4876]: I1215 06:54:20.845037 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.070599 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.070834 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" containerName="controller-manager" containerID="cri-o://8ff9fa9d97d310380259908ad9942a2dedfd1900b5977b7d12c3466779bbe18b" gracePeriod=30 Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.168760 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 
06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.169222 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" podUID="ca3c2acc-5299-4f75-a891-e018580e62ee" containerName="route-controller-manager" containerID="cri-o://2fe889aed73de84debf48f895bda87b0217571fc821e9cd823979b00eefbcefb" gracePeriod=30 Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.675352 4876 generic.go:334] "Generic (PLEG): container finished" podID="b020cb85-67cf-46ad-8909-823c4a13376e" containerID="8ff9fa9d97d310380259908ad9942a2dedfd1900b5977b7d12c3466779bbe18b" exitCode=0 Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.675408 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" event={"ID":"b020cb85-67cf-46ad-8909-823c4a13376e","Type":"ContainerDied","Data":"8ff9fa9d97d310380259908ad9942a2dedfd1900b5977b7d12c3466779bbe18b"} Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.677011 4876 generic.go:334] "Generic (PLEG): container finished" podID="ca3c2acc-5299-4f75-a891-e018580e62ee" containerID="2fe889aed73de84debf48f895bda87b0217571fc821e9cd823979b00eefbcefb" exitCode=0 Dec 15 06:54:21 crc kubenswrapper[4876]: I1215 06:54:21.677036 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" event={"ID":"ca3c2acc-5299-4f75-a891-e018580e62ee","Type":"ContainerDied","Data":"2fe889aed73de84debf48f895bda87b0217571fc821e9cd823979b00eefbcefb"} Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.499841 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.500657 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mg5hr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-p7mnt_openshift-marketplace(95d0b586-aa73-44cc-8007-33b96ce899fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.501905 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-p7mnt" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.507574 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.507738 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2p9kz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-kf2qs_openshift-marketplace(7dd38562-2323-4992-a015-7ba42406c1b5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.508999 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-kf2qs" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.582024 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.582470 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6fr89,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-c69j4_openshift-marketplace(cbc129a2-a1bc-42b7-a431-55ca7eb91d23): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.583925 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-c69j4" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.606487 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.606652 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x84vd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-bgm7t_openshift-marketplace(9a81db39-a0c0-4be6-8e60-73fdf457b8b9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.607924 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-bgm7t" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.644722 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.644901 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8sln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9n2p9_openshift-marketplace(8c9a4d2a-d631-4257-8edd-82ef60db5de1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.647213 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9n2p9" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.692493 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-kf2qs" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.695278 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-bgm7t" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.696280 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9n2p9" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.696347 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-p7mnt" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" Dec 15 06:54:24 
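
For reference, the "UnhandledError" entries above each dump the full serialized spec of the failing "extract-content" init container. The sketch below restates that spec with k8s.io/api types, with every value copied from the logged dump; only the index image differs per catalog (redhat-operator-index, certified-operator-index, community-operator-index, redhat-marketplace-index, all :v4.18), and the pod-specific kube-api-access-* projected mount is omitted. This is a reconstruction for readability, not the manifest the cluster actually applied.

// Reconstruction of the "extract-content" init container logged above,
// expressed with k8s.io/api types. All values are copied from the dump;
// the projected kube-api-access-* mount (name differs per pod) is omitted.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func extractContentInitContainer(indexImage string) corev1.Container {
	runAsUser := int64(1000170000)
	runAsNonRoot := true
	allowPrivilegeEscalation := false

	return corev1.Container{
		Name:    "extract-content",
		Image:   indexImage, // e.g. "registry.redhat.io/redhat/redhat-operator-index:v4.18"
		Command: []string{"/utilities/copy-content"},
		Args: []string{
			"--catalog.from=/configs",
			"--catalog.to=/extracted-catalog/catalog",
			"--cache.from=/tmp/cache",
			"--cache.to=/extracted-catalog/cache",
		},
		VolumeMounts: []corev1.VolumeMount{
			{Name: "utilities", MountPath: "/utilities"},
			{Name: "catalog-content", MountPath: "/extracted-catalog"},
		},
		ImagePullPolicy:          corev1.PullAlways,
		TerminationMessagePath:   "/dev/termination-log",
		TerminationMessagePolicy: corev1.TerminationMessageFallbackToLogsOnError,
		SecurityContext: &corev1.SecurityContext{
			Capabilities:             &corev1.Capabilities{Drop: []corev1.Capability{"ALL"}},
			RunAsUser:                &runAsUser,
			RunAsNonRoot:             &runAsNonRoot,
			AllowPrivilegeEscalation: &allowPrivilegeEscalation,
		},
	}
}

func main() {
	c := extractContentInitContainer("registry.redhat.io/redhat/redhat-operator-index:v4.18")
	fmt.Printf("%s pulls %s with policy %s\n", c.Name, c.Image, c.ImagePullPolicy)
}
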
crc kubenswrapper[4876]: E1215 06:54:24.696573 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-c69j4" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.745210 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.749273 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804222 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.804476 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca3c2acc-5299-4f75-a891-e018580e62ee" containerName="route-controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804492 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca3c2acc-5299-4f75-a891-e018580e62ee" containerName="route-controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.804503 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" containerName="controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804512 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" containerName="controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: E1215 06:54:24.804530 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a1fbb6-f240-4b99-b055-083818bf54f3" containerName="pruner" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804540 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a1fbb6-f240-4b99-b055-083818bf54f3" containerName="pruner" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804657 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a1fbb6-f240-4b99-b055-083818bf54f3" containerName="pruner" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804670 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca3c2acc-5299-4f75-a891-e018580e62ee" containerName="route-controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.804682 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" containerName="controller-manager" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.805128 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.820919 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.914829 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config\") pod \"ca3c2acc-5299-4f75-a891-e018580e62ee\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915282 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert\") pod \"ca3c2acc-5299-4f75-a891-e018580e62ee\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915334 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7cwx\" (UniqueName: \"kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx\") pod \"ca3c2acc-5299-4f75-a891-e018580e62ee\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915358 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") pod \"b020cb85-67cf-46ad-8909-823c4a13376e\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915376 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") pod \"b020cb85-67cf-46ad-8909-823c4a13376e\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915415 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") pod \"b020cb85-67cf-46ad-8909-823c4a13376e\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915443 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca\") pod \"ca3c2acc-5299-4f75-a891-e018580e62ee\" (UID: \"ca3c2acc-5299-4f75-a891-e018580e62ee\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915479 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c2cb\" (UniqueName: \"kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb\") pod \"b020cb85-67cf-46ad-8909-823c4a13376e\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915531 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") pod \"b020cb85-67cf-46ad-8909-823c4a13376e\" (UID: \"b020cb85-67cf-46ad-8909-823c4a13376e\") " Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915788 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915816 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915863 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.915890 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrpm9\" (UniqueName: \"kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.916809 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca" (OuterVolumeSpecName: "client-ca") pod "b020cb85-67cf-46ad-8909-823c4a13376e" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.916976 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config" (OuterVolumeSpecName: "config") pod "ca3c2acc-5299-4f75-a891-e018580e62ee" (UID: "ca3c2acc-5299-4f75-a891-e018580e62ee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.917017 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca" (OuterVolumeSpecName: "client-ca") pod "ca3c2acc-5299-4f75-a891-e018580e62ee" (UID: "ca3c2acc-5299-4f75-a891-e018580e62ee"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.917552 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config" (OuterVolumeSpecName: "config") pod "b020cb85-67cf-46ad-8909-823c4a13376e" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.918779 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b020cb85-67cf-46ad-8909-823c4a13376e" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.922273 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx" (OuterVolumeSpecName: "kube-api-access-w7cwx") pod "ca3c2acc-5299-4f75-a891-e018580e62ee" (UID: "ca3c2acc-5299-4f75-a891-e018580e62ee"). InnerVolumeSpecName "kube-api-access-w7cwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.923530 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ca3c2acc-5299-4f75-a891-e018580e62ee" (UID: "ca3c2acc-5299-4f75-a891-e018580e62ee"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.923572 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b020cb85-67cf-46ad-8909-823c4a13376e" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.923709 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rzth5"] Dec 15 06:54:24 crc kubenswrapper[4876]: I1215 06:54:24.923971 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb" (OuterVolumeSpecName: "kube-api-access-8c2cb") pod "b020cb85-67cf-46ad-8909-823c4a13376e" (UID: "b020cb85-67cf-46ad-8909-823c4a13376e"). InnerVolumeSpecName "kube-api-access-8c2cb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.017469 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.017560 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.017590 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrpm9\" (UniqueName: \"kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.017648 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019029 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.018556 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019683 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c2cb\" (UniqueName: \"kubernetes.io/projected/b020cb85-67cf-46ad-8909-823c4a13376e-kube-api-access-8c2cb\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019701 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019717 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca3c2acc-5299-4f75-a891-e018580e62ee-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019729 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca3c2acc-5299-4f75-a891-e018580e62ee-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 
crc kubenswrapper[4876]: I1215 06:54:25.019760 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7cwx\" (UniqueName: \"kubernetes.io/projected/ca3c2acc-5299-4f75-a891-e018580e62ee-kube-api-access-w7cwx\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019772 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b020cb85-67cf-46ad-8909-823c4a13376e-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019775 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019795 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.019892 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b020cb85-67cf-46ad-8909-823c4a13376e-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.023692 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.034957 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrpm9\" (UniqueName: \"kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9\") pod \"route-controller-manager-7fdd4fd4f8-q7kfn\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.121811 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.378371 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.701383 4876 generic.go:334] "Generic (PLEG): container finished" podID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerID="8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc" exitCode=0 Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.701524 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerDied","Data":"8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.704579 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" event={"ID":"300af5af-5e2a-4109-ba2c-82f2fb20f62b","Type":"ContainerStarted","Data":"58cfc5b2e17ab6798fe951f6f082a2eaad665de5db184934f0fdd146803fe7c9"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.704621 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" event={"ID":"300af5af-5e2a-4109-ba2c-82f2fb20f62b","Type":"ContainerStarted","Data":"cbc7277909b45f2cb5bab56ef802522d5256c1cbbf3a2d12515413ab0bc29e9c"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.705435 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.708941 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rzth5" event={"ID":"daee20e1-a017-4464-9626-ea2c52cfae57","Type":"ContainerStarted","Data":"dcf25b1027d6368c45c7c70678ea6ad8423314b7d34f06d40ac658f7f8c194e0"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.708999 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rzth5" event={"ID":"daee20e1-a017-4464-9626-ea2c52cfae57","Type":"ContainerStarted","Data":"024b68312da1dbdb3e4a93dc303805c1c33a2003172dcbb465c2cb4937e777ac"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.709020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rzth5" event={"ID":"daee20e1-a017-4464-9626-ea2c52cfae57","Type":"ContainerStarted","Data":"4691dc33ff7f14a59667f7ef0bf48e2462db2f3d286b09be08191114ce8115cc"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.711968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" event={"ID":"ca3c2acc-5299-4f75-a891-e018580e62ee","Type":"ContainerDied","Data":"cce4e74467fd38f4408a9952b643a313cb9115f140de0ccbc62f42303fcaafa5"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.712027 4876 scope.go:117] "RemoveContainer" containerID="2fe889aed73de84debf48f895bda87b0217571fc821e9cd823979b00eefbcefb" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.712243 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.715325 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" event={"ID":"b020cb85-67cf-46ad-8909-823c4a13376e","Type":"ContainerDied","Data":"6f1af8ed4a408f02464ac1edd63e1d26f3a54c441c392a71981c8e66871a09a1"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.715369 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6pnt6" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.725315 4876 generic.go:334] "Generic (PLEG): container finished" podID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerID="76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44" exitCode=0 Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.725408 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerDied","Data":"76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.735017 4876 generic.go:334] "Generic (PLEG): container finished" podID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerID="70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a" exitCode=0 Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.735083 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerDied","Data":"70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a"} Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.747713 4876 scope.go:117] "RemoveContainer" containerID="8ff9fa9d97d310380259908ad9942a2dedfd1900b5977b7d12c3466779bbe18b" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.758904 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rzth5" podStartSLOduration=167.758883796 podStartE2EDuration="2m47.758883796s" podCreationTimestamp="2025-12-15 06:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:25.755732808 +0000 UTC m=+191.326875719" watchObservedRunningTime="2025-12-15 06:54:25.758883796 +0000 UTC m=+191.330026707" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.803764 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" podStartSLOduration=4.8037473330000005 podStartE2EDuration="4.803747333s" podCreationTimestamp="2025-12-15 06:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:25.801885341 +0000 UTC m=+191.373028262" watchObservedRunningTime="2025-12-15 06:54:25.803747333 +0000 UTC m=+191.374890264" Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.821213 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.825081 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-ww7lh"] Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.831129 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.835231 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6pnt6"] Dec 15 06:54:25 crc kubenswrapper[4876]: I1215 06:54:25.990006 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.714413 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b020cb85-67cf-46ad-8909-823c4a13376e" path="/var/lib/kubelet/pods/b020cb85-67cf-46ad-8909-823c4a13376e/volumes" Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.715520 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca3c2acc-5299-4f75-a891-e018580e62ee" path="/var/lib/kubelet/pods/ca3c2acc-5299-4f75-a891-e018580e62ee/volumes" Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.749918 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerStarted","Data":"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87"} Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.755355 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerStarted","Data":"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b"} Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.758443 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerStarted","Data":"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9"} Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.773373 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4bwjm" podStartSLOduration=2.905097194 podStartE2EDuration="36.773345094s" podCreationTimestamp="2025-12-15 06:53:50 +0000 UTC" firstStartedPulling="2025-12-15 06:53:52.265631425 +0000 UTC m=+157.836774336" lastFinishedPulling="2025-12-15 06:54:26.133879325 +0000 UTC m=+191.705022236" observedRunningTime="2025-12-15 06:54:26.770032672 +0000 UTC m=+192.341175583" watchObservedRunningTime="2025-12-15 06:54:26.773345094 +0000 UTC m=+192.344488005" Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.791597 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2fgng" podStartSLOduration=2.751653381 podStartE2EDuration="37.791571576s" podCreationTimestamp="2025-12-15 06:53:49 +0000 UTC" firstStartedPulling="2025-12-15 06:53:51.164683811 +0000 UTC m=+156.735826722" lastFinishedPulling="2025-12-15 06:54:26.204602006 +0000 UTC m=+191.775744917" observedRunningTime="2025-12-15 06:54:26.788794138 +0000 UTC m=+192.359937049" watchObservedRunningTime="2025-12-15 06:54:26.791571576 +0000 UTC m=+192.362714487" Dec 15 06:54:26 crc kubenswrapper[4876]: I1215 06:54:26.807475 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-nbhbg" podStartSLOduration=2.567991997 podStartE2EDuration="39.807454061s" podCreationTimestamp="2025-12-15 06:53:47 +0000 UTC" firstStartedPulling="2025-12-15 06:53:49.019628519 +0000 UTC m=+154.590771430" lastFinishedPulling="2025-12-15 06:54:26.259090583 +0000 UTC m=+191.830233494" observedRunningTime="2025-12-15 06:54:26.805839636 +0000 UTC m=+192.376982557" watchObservedRunningTime="2025-12-15 06:54:26.807454061 +0000 UTC m=+192.378596972" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.031072 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.031774 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034343 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034391 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034412 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034627 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034716 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.034786 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.040480 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.050251 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl9sb\" (UniqueName: \"kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.050315 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.050347 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: 
I1215 06:54:27.050421 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.050460 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.050777 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.151460 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl9sb\" (UniqueName: \"kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.151523 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.151559 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.151615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.151643 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.152383 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc 
kubenswrapper[4876]: I1215 06:54:27.152853 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.152896 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.160596 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.177244 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl9sb\" (UniqueName: \"kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb\") pod \"controller-manager-86457fbb69-9gsqs\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.322767 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.322825 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.347797 4876 util.go:30] "No sandbox for pod can be found. 
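
The liveness-probe failure for machine-config-daemon-zdprc above corresponds to an HTTP GET against 127.0.0.1:8798/health being refused. A probe that would issue exactly that request is sketched below; host, port and path are taken from the logged output, while scheme, timeouts and thresholds are not logged, so everything else is an assumed default and the real manifest may rely on host networking rather than an explicit Host field.

// Sketch of a liveness probe matching the failed request logged above
// (GET http://127.0.0.1:8798/health). Host, port and path come from the log;
// all other fields are assumed defaults, not logged values.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1", // logged target; the daemon appears to listen on the host loopback
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
	}
	fmt.Printf("GET http://%s:%s%s\n", probe.HTTPGet.Host, probe.HTTPGet.Port.String(), probe.HTTPGet.Path)
}
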
Need to start a new one" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.574555 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:27 crc kubenswrapper[4876]: W1215 06:54:27.595966 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c80d2a8_36b4_455c_9d86_e8370e521199.slice/crio-7f296b211f55aaacd7c478ebf42447a33d310b01f5ec967fdc0684eb8fb2fda1 WatchSource:0}: Error finding container 7f296b211f55aaacd7c478ebf42447a33d310b01f5ec967fdc0684eb8fb2fda1: Status 404 returned error can't find the container with id 7f296b211f55aaacd7c478ebf42447a33d310b01f5ec967fdc0684eb8fb2fda1 Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.765389 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" event={"ID":"8c80d2a8-36b4-455c-9d86-e8370e521199","Type":"ContainerStarted","Data":"205558dadea38f11577b459800d41a5955a1f0f38c8eafeb878668385ac70c05"} Dec 15 06:54:27 crc kubenswrapper[4876]: I1215 06:54:27.765435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" event={"ID":"8c80d2a8-36b4-455c-9d86-e8370e521199","Type":"ContainerStarted","Data":"7f296b211f55aaacd7c478ebf42447a33d310b01f5ec967fdc0684eb8fb2fda1"} Dec 15 06:54:28 crc kubenswrapper[4876]: I1215 06:54:28.183730 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:28 crc kubenswrapper[4876]: I1215 06:54:28.184043 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:28 crc kubenswrapper[4876]: I1215 06:54:28.772447 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:28 crc kubenswrapper[4876]: I1215 06:54:28.780904 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:28 crc kubenswrapper[4876]: I1215 06:54:28.793198 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" podStartSLOduration=7.793173247 podStartE2EDuration="7.793173247s" podCreationTimestamp="2025-12-15 06:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:28.79254393 +0000 UTC m=+194.363686851" watchObservedRunningTime="2025-12-15 06:54:28.793173247 +0000 UTC m=+194.364316208" Dec 15 06:54:29 crc kubenswrapper[4876]: I1215 06:54:29.267656 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-nbhbg" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="registry-server" probeResult="failure" output=< Dec 15 06:54:29 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 06:54:29 crc kubenswrapper[4876]: > Dec 15 06:54:29 crc kubenswrapper[4876]: I1215 06:54:29.544723 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:54:29 crc kubenswrapper[4876]: I1215 
06:54:29.545133 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:54:29 crc kubenswrapper[4876]: I1215 06:54:29.597021 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.480929 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.481583 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.487605 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.487851 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.498668 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.601256 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.601364 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.702573 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.702655 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.702772 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.725500 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " 
pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.803688 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.828480 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:54:30 crc kubenswrapper[4876]: I1215 06:54:30.828557 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:54:31 crc kubenswrapper[4876]: I1215 06:54:31.214467 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 15 06:54:31 crc kubenswrapper[4876]: I1215 06:54:31.789735 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"83344255-060f-4486-a1e3-0ed2df8bf72b","Type":"ContainerStarted","Data":"c8ada20e4b7267a51e03f9c7a794f0a20b8ec7d9241fb0497a5f489ba642a4a3"} Dec 15 06:54:31 crc kubenswrapper[4876]: I1215 06:54:31.874730 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4bwjm" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="registry-server" probeResult="failure" output=< Dec 15 06:54:31 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 06:54:31 crc kubenswrapper[4876]: > Dec 15 06:54:32 crc kubenswrapper[4876]: I1215 06:54:32.796296 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"83344255-060f-4486-a1e3-0ed2df8bf72b","Type":"ContainerStarted","Data":"fab14229b52b528e317e60591097bfc4ec61298087da2714d6137ad76a10252a"} Dec 15 06:54:32 crc kubenswrapper[4876]: I1215 06:54:32.813760 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.813734673 podStartE2EDuration="2.813734673s" podCreationTimestamp="2025-12-15 06:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:32.808180061 +0000 UTC m=+198.379323022" watchObservedRunningTime="2025-12-15 06:54:32.813734673 +0000 UTC m=+198.384877594" Dec 15 06:54:33 crc kubenswrapper[4876]: I1215 06:54:33.804462 4876 generic.go:334] "Generic (PLEG): container finished" podID="83344255-060f-4486-a1e3-0ed2df8bf72b" containerID="fab14229b52b528e317e60591097bfc4ec61298087da2714d6137ad76a10252a" exitCode=0 Dec 15 06:54:33 crc kubenswrapper[4876]: I1215 06:54:33.804519 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"83344255-060f-4486-a1e3-0ed2df8bf72b","Type":"ContainerDied","Data":"fab14229b52b528e317e60591097bfc4ec61298087da2714d6137ad76a10252a"} Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.147463 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.166345 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access\") pod \"83344255-060f-4486-a1e3-0ed2df8bf72b\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.166407 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir\") pod \"83344255-060f-4486-a1e3-0ed2df8bf72b\" (UID: \"83344255-060f-4486-a1e3-0ed2df8bf72b\") " Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.166670 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "83344255-060f-4486-a1e3-0ed2df8bf72b" (UID: "83344255-060f-4486-a1e3-0ed2df8bf72b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.176936 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "83344255-060f-4486-a1e3-0ed2df8bf72b" (UID: "83344255-060f-4486-a1e3-0ed2df8bf72b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.267274 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83344255-060f-4486-a1e3-0ed2df8bf72b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.267312 4876 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83344255-060f-4486-a1e3-0ed2df8bf72b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.482484 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 15 06:54:35 crc kubenswrapper[4876]: E1215 06:54:35.482762 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83344255-060f-4486-a1e3-0ed2df8bf72b" containerName="pruner" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.482777 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="83344255-060f-4486-a1e3-0ed2df8bf72b" containerName="pruner" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.482886 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="83344255-060f-4486-a1e3-0ed2df8bf72b" containerName="pruner" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.483349 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.495658 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.571811 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.571936 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.572000 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.674350 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.674465 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.674508 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.674569 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.674568 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir\") pod \"installer-9-crc\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.697134 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.817502 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"83344255-060f-4486-a1e3-0ed2df8bf72b","Type":"ContainerDied","Data":"c8ada20e4b7267a51e03f9c7a794f0a20b8ec7d9241fb0497a5f489ba642a4a3"} Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.817796 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8ada20e4b7267a51e03f9c7a794f0a20b8ec7d9241fb0497a5f489ba642a4a3" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.817577 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 15 06:54:35 crc kubenswrapper[4876]: I1215 06:54:35.818740 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:54:36 crc kubenswrapper[4876]: I1215 06:54:36.227321 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 15 06:54:36 crc kubenswrapper[4876]: I1215 06:54:36.824321 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"39ca6f1d-cafe-43e5-9d15-5175a55a73c8","Type":"ContainerStarted","Data":"125fb0e49eee0ae112edc6593dde50f7d0311cc630d137609a93a7f410996949"} Dec 15 06:54:37 crc kubenswrapper[4876]: I1215 06:54:37.830412 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"39ca6f1d-cafe-43e5-9d15-5175a55a73c8","Type":"ContainerStarted","Data":"5ab861f88420786f049e0614335adc2eb9257cd5174d2f161124ba6164a36d97"} Dec 15 06:54:37 crc kubenswrapper[4876]: I1215 06:54:37.854433 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.854413137 podStartE2EDuration="2.854413137s" podCreationTimestamp="2025-12-15 06:54:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:37.852387502 +0000 UTC m=+203.423530413" watchObservedRunningTime="2025-12-15 06:54:37.854413137 +0000 UTC m=+203.425556048" Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.225829 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.279236 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.839234 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dd38562-2323-4992-a015-7ba42406c1b5" containerID="7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638" exitCode=0 Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.839325 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerDied","Data":"7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638"} Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.844760 4876 generic.go:334] "Generic (PLEG): container finished" podID="95d0b586-aa73-44cc-8007-33b96ce899fa" 
containerID="d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d" exitCode=0 Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.844887 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerDied","Data":"d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d"} Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.848826 4876 generic.go:334] "Generic (PLEG): container finished" podID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerID="fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767" exitCode=0 Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.848950 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerDied","Data":"fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767"} Dec 15 06:54:38 crc kubenswrapper[4876]: I1215 06:54:38.939665 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.582175 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.856061 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerStarted","Data":"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83"} Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.860237 4876 generic.go:334] "Generic (PLEG): container finished" podID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerID="ac3697dc9ba7d638dd4b4ae4349c7b532a387c863aff81ad690fe95b09e9a4aa" exitCode=0 Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.860267 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerDied","Data":"ac3697dc9ba7d638dd4b4ae4349c7b532a387c863aff81ad690fe95b09e9a4aa"} Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.862555 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerStarted","Data":"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82"} Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.869272 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerStarted","Data":"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc"} Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.869444 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nbhbg" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="registry-server" containerID="cri-o://00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b" gracePeriod=2 Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.896704 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9n2p9" podStartSLOduration=2.637469878 podStartE2EDuration="52.896688583s" 
podCreationTimestamp="2025-12-15 06:53:47 +0000 UTC" firstStartedPulling="2025-12-15 06:53:49.006647016 +0000 UTC m=+154.577789917" lastFinishedPulling="2025-12-15 06:54:39.265865691 +0000 UTC m=+204.837008622" observedRunningTime="2025-12-15 06:54:39.894335502 +0000 UTC m=+205.465478423" watchObservedRunningTime="2025-12-15 06:54:39.896688583 +0000 UTC m=+205.467831484" Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.917226 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p7mnt" podStartSLOduration=2.618073975 podStartE2EDuration="52.917205614s" podCreationTimestamp="2025-12-15 06:53:47 +0000 UTC" firstStartedPulling="2025-12-15 06:53:49.039793495 +0000 UTC m=+154.610936406" lastFinishedPulling="2025-12-15 06:54:39.338925134 +0000 UTC m=+204.910068045" observedRunningTime="2025-12-15 06:54:39.915353444 +0000 UTC m=+205.486496375" watchObservedRunningTime="2025-12-15 06:54:39.917205614 +0000 UTC m=+205.488348525" Dec 15 06:54:39 crc kubenswrapper[4876]: I1215 06:54:39.937676 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kf2qs" podStartSLOduration=2.6674641770000003 podStartE2EDuration="52.937654313s" podCreationTimestamp="2025-12-15 06:53:47 +0000 UTC" firstStartedPulling="2025-12-15 06:53:49.014750433 +0000 UTC m=+154.585893344" lastFinishedPulling="2025-12-15 06:54:39.284940569 +0000 UTC m=+204.856083480" observedRunningTime="2025-12-15 06:54:39.936277363 +0000 UTC m=+205.507420274" watchObservedRunningTime="2025-12-15 06:54:39.937654313 +0000 UTC m=+205.508797234" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.302086 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.349564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zds85\" (UniqueName: \"kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85\") pod \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.349685 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content\") pod \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.349760 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities\") pod \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\" (UID: \"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719\") " Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.350744 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities" (OuterVolumeSpecName: "utilities") pod "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" (UID: "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.358716 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85" (OuterVolumeSpecName: "kube-api-access-zds85") pod "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" (UID: "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719"). InnerVolumeSpecName "kube-api-access-zds85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.407432 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" (UID: "7eb9a25e-fdc9-47d6-abff-3cb10ffd3719"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.451698 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zds85\" (UniqueName: \"kubernetes.io/projected/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-kube-api-access-zds85\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.451742 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.451753 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.877224 4876 generic.go:334] "Generic (PLEG): container finished" podID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerID="00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b" exitCode=0 Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.877331 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nbhbg" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.877323 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerDied","Data":"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b"} Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.879268 4876 scope.go:117] "RemoveContainer" containerID="00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.879214 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhbg" event={"ID":"7eb9a25e-fdc9-47d6-abff-3cb10ffd3719","Type":"ContainerDied","Data":"0f4ab9b1a13261b61abef46c677b6ab6a88689dc1d8778bf0e9d97f3cce535ba"} Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.880990 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.900261 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.904458 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nbhbg"] Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.904658 4876 scope.go:117] "RemoveContainer" containerID="76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.934751 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.939890 4876 scope.go:117] "RemoveContainer" containerID="298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.962649 4876 scope.go:117] "RemoveContainer" containerID="00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b" Dec 15 06:54:40 crc kubenswrapper[4876]: E1215 06:54:40.964694 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b\": container with ID starting with 00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b not found: ID does not exist" containerID="00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.964839 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b"} err="failed to get container status \"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b\": rpc error: code = NotFound desc = could not find container \"00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b\": container with ID starting with 00dc0e7b712b5d3e91d621daad410d19a9fe637a3e19fe4706c56c03cf6d7e6b not found: ID does not exist" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.964988 4876 scope.go:117] "RemoveContainer" containerID="76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44" Dec 15 06:54:40 crc kubenswrapper[4876]: E1215 06:54:40.965523 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44\": container with ID starting with 76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44 not found: ID does not exist" containerID="76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.965634 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44"} err="failed to get container status \"76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44\": rpc error: code = NotFound desc = could not find container \"76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44\": container with ID starting with 76764c866656ef1ac8cd3edd85ae0a58a8d34dfa2f33fded54551933894a3f44 not found: ID does not exist" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.965742 4876 scope.go:117] "RemoveContainer" containerID="298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1" Dec 15 06:54:40 crc kubenswrapper[4876]: E1215 06:54:40.966374 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1\": container with ID starting with 298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1 not found: ID does not exist" containerID="298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1" Dec 15 06:54:40 crc kubenswrapper[4876]: I1215 06:54:40.966425 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1"} err="failed to get container status \"298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1\": rpc error: code = NotFound desc = could not find container \"298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1\": container with ID starting with 298818ebff7e5cf0aa227a1b8b11c617e8fcbec8a64100b93563e1642dfee3c1 not found: ID does not exist" Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.097585 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.097828 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" podUID="8c80d2a8-36b4-455c-9d86-e8370e521199" containerName="controller-manager" containerID="cri-o://205558dadea38f11577b459800d41a5955a1f0f38c8eafeb878668385ac70c05" gracePeriod=30 Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.123096 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.123372 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" podUID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" containerName="route-controller-manager" containerID="cri-o://58cfc5b2e17ab6798fe951f6f082a2eaad665de5db184934f0fdd146803fe7c9" gracePeriod=30 Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.888551 4876 generic.go:334] "Generic (PLEG): container finished" podID="8c80d2a8-36b4-455c-9d86-e8370e521199" 
containerID="205558dadea38f11577b459800d41a5955a1f0f38c8eafeb878668385ac70c05" exitCode=0 Dec 15 06:54:41 crc kubenswrapper[4876]: I1215 06:54:41.888612 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" event={"ID":"8c80d2a8-36b4-455c-9d86-e8370e521199","Type":"ContainerDied","Data":"205558dadea38f11577b459800d41a5955a1f0f38c8eafeb878668385ac70c05"} Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.721999 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" path="/var/lib/kubelet/pods/7eb9a25e-fdc9-47d6-abff-3cb10ffd3719/volumes" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.779466 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.806969 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:54:42 crc kubenswrapper[4876]: E1215 06:54:42.807233 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="extract-utilities" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807253 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="extract-utilities" Dec 15 06:54:42 crc kubenswrapper[4876]: E1215 06:54:42.807265 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="registry-server" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807272 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="registry-server" Dec 15 06:54:42 crc kubenswrapper[4876]: E1215 06:54:42.807289 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c80d2a8-36b4-455c-9d86-e8370e521199" containerName="controller-manager" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807298 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c80d2a8-36b4-455c-9d86-e8370e521199" containerName="controller-manager" Dec 15 06:54:42 crc kubenswrapper[4876]: E1215 06:54:42.807316 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="extract-content" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807324 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="extract-content" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807439 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c80d2a8-36b4-455c-9d86-e8370e521199" containerName="controller-manager" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.807460 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb9a25e-fdc9-47d6-abff-3cb10ffd3719" containerName="registry-server" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.808144 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.824580 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893461 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles\") pod \"8c80d2a8-36b4-455c-9d86-e8370e521199\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893539 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert\") pod \"8c80d2a8-36b4-455c-9d86-e8370e521199\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893571 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config\") pod \"8c80d2a8-36b4-455c-9d86-e8370e521199\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893626 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca\") pod \"8c80d2a8-36b4-455c-9d86-e8370e521199\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893719 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl9sb\" (UniqueName: \"kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb\") pod \"8c80d2a8-36b4-455c-9d86-e8370e521199\" (UID: \"8c80d2a8-36b4-455c-9d86-e8370e521199\") " Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893892 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893961 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.893999 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.894020 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.894057 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs6lt\" (UniqueName: \"kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.894909 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config" (OuterVolumeSpecName: "config") pod "8c80d2a8-36b4-455c-9d86-e8370e521199" (UID: "8c80d2a8-36b4-455c-9d86-e8370e521199"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.895012 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8c80d2a8-36b4-455c-9d86-e8370e521199" (UID: "8c80d2a8-36b4-455c-9d86-e8370e521199"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.895436 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca" (OuterVolumeSpecName: "client-ca") pod "8c80d2a8-36b4-455c-9d86-e8370e521199" (UID: "8c80d2a8-36b4-455c-9d86-e8370e521199"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.900698 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8c80d2a8-36b4-455c-9d86-e8370e521199" (UID: "8c80d2a8-36b4-455c-9d86-e8370e521199"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.901869 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb" (OuterVolumeSpecName: "kube-api-access-wl9sb") pod "8c80d2a8-36b4-455c-9d86-e8370e521199" (UID: "8c80d2a8-36b4-455c-9d86-e8370e521199"). InnerVolumeSpecName "kube-api-access-wl9sb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.902968 4876 generic.go:334] "Generic (PLEG): container finished" podID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" containerID="58cfc5b2e17ab6798fe951f6f082a2eaad665de5db184934f0fdd146803fe7c9" exitCode=0 Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.903034 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" event={"ID":"300af5af-5e2a-4109-ba2c-82f2fb20f62b","Type":"ContainerDied","Data":"58cfc5b2e17ab6798fe951f6f082a2eaad665de5db184934f0fdd146803fe7c9"} Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.905437 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" event={"ID":"8c80d2a8-36b4-455c-9d86-e8370e521199","Type":"ContainerDied","Data":"7f296b211f55aaacd7c478ebf42447a33d310b01f5ec967fdc0684eb8fb2fda1"} Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.905474 4876 scope.go:117] "RemoveContainer" containerID="205558dadea38f11577b459800d41a5955a1f0f38c8eafeb878668385ac70c05" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.905573 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86457fbb69-9gsqs" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.962871 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.966486 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-86457fbb69-9gsqs"] Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995447 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995540 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995574 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995593 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995624 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-bs6lt\" (UniqueName: \"kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995685 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl9sb\" (UniqueName: \"kubernetes.io/projected/8c80d2a8-36b4-455c-9d86-e8370e521199-kube-api-access-wl9sb\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995697 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995707 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c80d2a8-36b4-455c-9d86-e8370e521199-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995718 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.995728 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c80d2a8-36b4-455c-9d86-e8370e521199-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.996529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.996859 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:42 crc kubenswrapper[4876]: I1215 06:54:42.997062 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.003975 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.015474 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs6lt\" (UniqueName: 
\"kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt\") pod \"controller-manager-5f65cbcb98-lh62l\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.124448 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.309488 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:54:43 crc kubenswrapper[4876]: W1215 06:54:43.312381 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda564eb2f_252e_4969_9704_af45f2667923.slice/crio-a44a44ff6c9dc6d63b4e305599498bb45737894497b7cbe93400509752e88638 WatchSource:0}: Error finding container a44a44ff6c9dc6d63b4e305599498bb45737894497b7cbe93400509752e88638: Status 404 returned error can't find the container with id a44a44ff6c9dc6d63b4e305599498bb45737894497b7cbe93400509752e88638 Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.470678 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.505319 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert\") pod \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.505369 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrpm9\" (UniqueName: \"kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9\") pod \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.505408 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config\") pod \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.505454 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca\") pod \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\" (UID: \"300af5af-5e2a-4109-ba2c-82f2fb20f62b\") " Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.507015 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config" (OuterVolumeSpecName: "config") pod "300af5af-5e2a-4109-ba2c-82f2fb20f62b" (UID: "300af5af-5e2a-4109-ba2c-82f2fb20f62b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.509410 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca" (OuterVolumeSpecName: "client-ca") pod "300af5af-5e2a-4109-ba2c-82f2fb20f62b" (UID: "300af5af-5e2a-4109-ba2c-82f2fb20f62b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.511373 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "300af5af-5e2a-4109-ba2c-82f2fb20f62b" (UID: "300af5af-5e2a-4109-ba2c-82f2fb20f62b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.511871 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9" (OuterVolumeSpecName: "kube-api-access-nrpm9") pod "300af5af-5e2a-4109-ba2c-82f2fb20f62b" (UID: "300af5af-5e2a-4109-ba2c-82f2fb20f62b"). InnerVolumeSpecName "kube-api-access-nrpm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.606330 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/300af5af-5e2a-4109-ba2c-82f2fb20f62b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.606359 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrpm9\" (UniqueName: \"kubernetes.io/projected/300af5af-5e2a-4109-ba2c-82f2fb20f62b-kube-api-access-nrpm9\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.606368 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.606376 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/300af5af-5e2a-4109-ba2c-82f2fb20f62b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.912738 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerStarted","Data":"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c"} Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.914246 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.914248 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn" event={"ID":"300af5af-5e2a-4109-ba2c-82f2fb20f62b","Type":"ContainerDied","Data":"cbc7277909b45f2cb5bab56ef802522d5256c1cbbf3a2d12515413ab0bc29e9c"} Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.914456 4876 scope.go:117] "RemoveContainer" containerID="58cfc5b2e17ab6798fe951f6f082a2eaad665de5db184934f0fdd146803fe7c9" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.918904 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" event={"ID":"a564eb2f-252e-4969-9704-af45f2667923","Type":"ContainerStarted","Data":"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2"} Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.918946 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" event={"ID":"a564eb2f-252e-4969-9704-af45f2667923","Type":"ContainerStarted","Data":"a44a44ff6c9dc6d63b4e305599498bb45737894497b7cbe93400509752e88638"} Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.919229 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.928573 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerStarted","Data":"77c2282ffca8df81ba751b866393e585ddd9997362f56488171ce326769c096d"} Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.939364 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.963059 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" podStartSLOduration=2.963042718 podStartE2EDuration="2.963042718s" podCreationTimestamp="2025-12-15 06:54:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:43.95950678 +0000 UTC m=+209.530649691" watchObservedRunningTime="2025-12-15 06:54:43.963042718 +0000 UTC m=+209.534185629" Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.973771 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:43 crc kubenswrapper[4876]: I1215 06:54:43.976425 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7fdd4fd4f8-q7kfn"] Dec 15 06:54:44 crc kubenswrapper[4876]: I1215 06:54:44.715030 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" path="/var/lib/kubelet/pods/300af5af-5e2a-4109-ba2c-82f2fb20f62b/volumes" Dec 15 06:54:44 crc kubenswrapper[4876]: I1215 06:54:44.716179 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c80d2a8-36b4-455c-9d86-e8370e521199" path="/var/lib/kubelet/pods/8c80d2a8-36b4-455c-9d86-e8370e521199/volumes" Dec 
15 06:54:44 crc kubenswrapper[4876]: I1215 06:54:44.936621 4876 generic.go:334] "Generic (PLEG): container finished" podID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerID="afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c" exitCode=0 Dec 15 06:54:44 crc kubenswrapper[4876]: I1215 06:54:44.936683 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerDied","Data":"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c"} Dec 15 06:54:44 crc kubenswrapper[4876]: I1215 06:54:44.981034 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c69j4" podStartSLOduration=3.705904494 podStartE2EDuration="55.981013886s" podCreationTimestamp="2025-12-15 06:53:49 +0000 UTC" firstStartedPulling="2025-12-15 06:53:51.191210265 +0000 UTC m=+156.762353176" lastFinishedPulling="2025-12-15 06:54:43.466319657 +0000 UTC m=+209.037462568" observedRunningTime="2025-12-15 06:54:44.976660401 +0000 UTC m=+210.547803322" watchObservedRunningTime="2025-12-15 06:54:44.981013886 +0000 UTC m=+210.552156797" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.045177 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:54:45 crc kubenswrapper[4876]: E1215 06:54:45.045425 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" containerName="route-controller-manager" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.045448 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" containerName="route-controller-manager" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.045538 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="300af5af-5e2a-4109-ba2c-82f2fb20f62b" containerName="route-controller-manager" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.045928 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.049555 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.049856 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.049898 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.050025 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.050096 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.050178 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.064947 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.126348 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.126685 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sn4b\" (UniqueName: \"kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.126744 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.126783 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.227654 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert\") pod 
\"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.227745 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sn4b\" (UniqueName: \"kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.227806 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.227866 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.228848 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.229621 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.235453 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.249604 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sn4b\" (UniqueName: \"kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b\") pod \"route-controller-manager-6dbff8c7dc-mvjw9\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.363117 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.828900 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:54:45 crc kubenswrapper[4876]: I1215 06:54:45.946690 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" event={"ID":"d1e28ae9-dd4c-4120-975f-5df7881b6880","Type":"ContainerStarted","Data":"10df5e4f08fb47868338f68b714d8ca7614647b1c2200baff487a875c96e99b1"} Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.956558 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerStarted","Data":"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342"} Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.959678 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" event={"ID":"d1e28ae9-dd4c-4120-975f-5df7881b6880","Type":"ContainerStarted","Data":"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0"} Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.959927 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.964871 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.975753 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bgm7t" podStartSLOduration=3.249332348 podStartE2EDuration="56.975736259s" podCreationTimestamp="2025-12-15 06:53:50 +0000 UTC" firstStartedPulling="2025-12-15 06:53:52.268425753 +0000 UTC m=+157.839568664" lastFinishedPulling="2025-12-15 06:54:45.994829674 +0000 UTC m=+211.565972575" observedRunningTime="2025-12-15 06:54:46.974780298 +0000 UTC m=+212.545923239" watchObservedRunningTime="2025-12-15 06:54:46.975736259 +0000 UTC m=+212.546879170" Dec 15 06:54:46 crc kubenswrapper[4876]: I1215 06:54:46.991607 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" podStartSLOduration=5.991586687 podStartE2EDuration="5.991586687s" podCreationTimestamp="2025-12-15 06:54:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:54:46.991454374 +0000 UTC m=+212.562597305" watchObservedRunningTime="2025-12-15 06:54:46.991586687 +0000 UTC m=+212.562729608" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.589561 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.589650 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.656219 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.775566 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.775612 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.824233 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.976210 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:47 crc kubenswrapper[4876]: I1215 06:54:47.976263 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:48 crc kubenswrapper[4876]: I1215 06:54:48.020780 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:54:48 crc kubenswrapper[4876]: I1215 06:54:48.029296 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:48 crc kubenswrapper[4876]: I1215 06:54:48.029394 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:54:49 crc kubenswrapper[4876]: I1215 06:54:49.019207 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:49 crc kubenswrapper[4876]: I1215 06:54:49.942288 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:54:49 crc kubenswrapper[4876]: I1215 06:54:49.960166 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:49 crc kubenswrapper[4876]: I1215 06:54:49.960221 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:49 crc kubenswrapper[4876]: I1215 06:54:49.999529 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:50 crc kubenswrapper[4876]: I1215 06:54:50.055278 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:50 crc kubenswrapper[4876]: I1215 06:54:50.983744 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p7mnt" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="registry-server" containerID="cri-o://f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc" gracePeriod=2 Dec 15 06:54:51 crc kubenswrapper[4876]: I1215 06:54:51.136862 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:54:51 crc kubenswrapper[4876]: I1215 06:54:51.138021 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.174569 4876 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-marketplace/redhat-operators-bgm7t" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="registry-server" probeResult="failure" output=< Dec 15 06:54:52 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 06:54:52 crc kubenswrapper[4876]: > Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.536418 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.536943 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c69j4" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="registry-server" containerID="cri-o://77c2282ffca8df81ba751b866393e585ddd9997362f56488171ce326769c096d" gracePeriod=2 Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.568930 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.634216 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5hr\" (UniqueName: \"kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr\") pod \"95d0b586-aa73-44cc-8007-33b96ce899fa\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.634265 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities\") pod \"95d0b586-aa73-44cc-8007-33b96ce899fa\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.634306 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content\") pod \"95d0b586-aa73-44cc-8007-33b96ce899fa\" (UID: \"95d0b586-aa73-44cc-8007-33b96ce899fa\") " Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.635415 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities" (OuterVolumeSpecName: "utilities") pod "95d0b586-aa73-44cc-8007-33b96ce899fa" (UID: "95d0b586-aa73-44cc-8007-33b96ce899fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.641842 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr" (OuterVolumeSpecName: "kube-api-access-mg5hr") pod "95d0b586-aa73-44cc-8007-33b96ce899fa" (UID: "95d0b586-aa73-44cc-8007-33b96ce899fa"). InnerVolumeSpecName "kube-api-access-mg5hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.698530 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "95d0b586-aa73-44cc-8007-33b96ce899fa" (UID: "95d0b586-aa73-44cc-8007-33b96ce899fa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.737269 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.737308 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5hr\" (UniqueName: \"kubernetes.io/projected/95d0b586-aa73-44cc-8007-33b96ce899fa-kube-api-access-mg5hr\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.737447 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/95d0b586-aa73-44cc-8007-33b96ce899fa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.996625 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p7mnt" Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.996710 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerDied","Data":"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc"} Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.996724 4876 generic.go:334] "Generic (PLEG): container finished" podID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerID="f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc" exitCode=0 Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.996755 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p7mnt" event={"ID":"95d0b586-aa73-44cc-8007-33b96ce899fa","Type":"ContainerDied","Data":"c5ca9de09bfa7a75f443af31f924554eb329bfe5b932616cbdb1063749e633bd"} Dec 15 06:54:52 crc kubenswrapper[4876]: I1215 06:54:52.996778 4876 scope.go:117] "RemoveContainer" containerID="f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.000002 4876 generic.go:334] "Generic (PLEG): container finished" podID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerID="77c2282ffca8df81ba751b866393e585ddd9997362f56488171ce326769c096d" exitCode=0 Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.000040 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerDied","Data":"77c2282ffca8df81ba751b866393e585ddd9997362f56488171ce326769c096d"} Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.039723 4876 scope.go:117] "RemoveContainer" containerID="d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.040474 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.047729 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.051908 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p7mnt"] Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.063443 4876 scope.go:117] "RemoveContainer" containerID="fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.081040 4876 scope.go:117] "RemoveContainer" containerID="f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc" Dec 15 06:54:53 crc kubenswrapper[4876]: E1215 06:54:53.081564 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc\": container with ID starting with f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc not found: ID does not exist" containerID="f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.081615 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc"} err="failed to get container status \"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc\": rpc error: code = NotFound desc = could not find container \"f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc\": container with ID starting with f14686ba2d72922868c33ed2422d36e0e4df100f3655f361ded5854454c453dc not found: ID does not exist" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.081650 4876 scope.go:117] "RemoveContainer" containerID="d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d" Dec 15 06:54:53 crc kubenswrapper[4876]: E1215 06:54:53.081901 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d\": container with ID starting with d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d not found: ID does not exist" containerID="d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.081933 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d"} err="failed to get container status \"d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d\": rpc error: code = NotFound desc = could not find container \"d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d\": container with ID starting with d0304a7864168e620d65fee4e29e91a6bcaac561b33b931c9d5ea03eb8adba7d not found: ID does not exist" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.081955 4876 scope.go:117] "RemoveContainer" containerID="fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855" Dec 15 06:54:53 crc kubenswrapper[4876]: E1215 06:54:53.082287 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855\": container with ID starting with 
fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855 not found: ID does not exist" containerID="fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.082321 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855"} err="failed to get container status \"fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855\": rpc error: code = NotFound desc = could not find container \"fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855\": container with ID starting with fb032fcb7f28a6d66d74a84b935244dc387bfa3783d78f4c1b79da7d86e22855 not found: ID does not exist" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.140804 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities\") pod \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.140926 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content\") pod \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.140955 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fr89\" (UniqueName: \"kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89\") pod \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\" (UID: \"cbc129a2-a1bc-42b7-a431-55ca7eb91d23\") " Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.141609 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities" (OuterVolumeSpecName: "utilities") pod "cbc129a2-a1bc-42b7-a431-55ca7eb91d23" (UID: "cbc129a2-a1bc-42b7-a431-55ca7eb91d23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.146017 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89" (OuterVolumeSpecName: "kube-api-access-6fr89") pod "cbc129a2-a1bc-42b7-a431-55ca7eb91d23" (UID: "cbc129a2-a1bc-42b7-a431-55ca7eb91d23"). InnerVolumeSpecName "kube-api-access-6fr89". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.162247 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cbc129a2-a1bc-42b7-a431-55ca7eb91d23" (UID: "cbc129a2-a1bc-42b7-a431-55ca7eb91d23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.242596 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.242651 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fr89\" (UniqueName: \"kubernetes.io/projected/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-kube-api-access-6fr89\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:53 crc kubenswrapper[4876]: I1215 06:54:53.242667 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cbc129a2-a1bc-42b7-a431-55ca7eb91d23-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.014622 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c69j4" event={"ID":"cbc129a2-a1bc-42b7-a431-55ca7eb91d23","Type":"ContainerDied","Data":"86f9b53b25b41428acc746415c272391d6abf347b86cfb587387a5213cbfa581"} Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.015247 4876 scope.go:117] "RemoveContainer" containerID="77c2282ffca8df81ba751b866393e585ddd9997362f56488171ce326769c096d" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.014769 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c69j4" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.041973 4876 scope.go:117] "RemoveContainer" containerID="ac3697dc9ba7d638dd4b4ae4349c7b532a387c863aff81ad690fe95b09e9a4aa" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.060652 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.066850 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c69j4"] Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.072163 4876 scope.go:117] "RemoveContainer" containerID="56fc1a2c5bc37981ae7f1b42da098c8b3176211032904bf781a9a8cf417ec047" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.712298 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" path="/var/lib/kubelet/pods/95d0b586-aa73-44cc-8007-33b96ce899fa/volumes" Dec 15 06:54:54 crc kubenswrapper[4876]: I1215 06:54:54.712862 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" path="/var/lib/kubelet/pods/cbc129a2-a1bc-42b7-a431-55ca7eb91d23/volumes" Dec 15 06:54:57 crc kubenswrapper[4876]: I1215 06:54:57.323004 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:54:57 crc kubenswrapper[4876]: I1215 06:54:57.323360 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:54:57 crc kubenswrapper[4876]: 
I1215 06:54:57.323405 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:54:57 crc kubenswrapper[4876]: I1215 06:54:57.324171 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 06:54:57 crc kubenswrapper[4876]: I1215 06:54:57.324234 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb" gracePeriod=600 Dec 15 06:54:58 crc kubenswrapper[4876]: I1215 06:54:58.040065 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb" exitCode=0 Dec 15 06:54:58 crc kubenswrapper[4876]: I1215 06:54:58.040166 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb"} Dec 15 06:54:58 crc kubenswrapper[4876]: I1215 06:54:58.040745 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d"} Dec 15 06:54:58 crc kubenswrapper[4876]: I1215 06:54:58.437874 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-bkzhk"] Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.106204 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.107036 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" podUID="a564eb2f-252e-4969-9704-af45f2667923" containerName="controller-manager" containerID="cri-o://83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2" gracePeriod=30 Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.195143 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.222759 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.223004 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" podUID="d1e28ae9-dd4c-4120-975f-5df7881b6880" containerName="route-controller-manager" containerID="cri-o://fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0" gracePeriod=30 Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.251763 4876 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.537482 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.646605 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.705076 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.781999 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs6lt\" (UniqueName: \"kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt\") pod \"a564eb2f-252e-4969-9704-af45f2667923\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782050 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config\") pod \"d1e28ae9-dd4c-4120-975f-5df7881b6880\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782088 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles\") pod \"a564eb2f-252e-4969-9704-af45f2667923\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782129 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config\") pod \"a564eb2f-252e-4969-9704-af45f2667923\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782158 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sn4b\" (UniqueName: \"kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b\") pod \"d1e28ae9-dd4c-4120-975f-5df7881b6880\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782199 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert\") pod \"a564eb2f-252e-4969-9704-af45f2667923\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782221 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca\") pod \"d1e28ae9-dd4c-4120-975f-5df7881b6880\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782265 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert\") pod \"d1e28ae9-dd4c-4120-975f-5df7881b6880\" (UID: \"d1e28ae9-dd4c-4120-975f-5df7881b6880\") " Dec 15 
06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782283 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca\") pod \"a564eb2f-252e-4969-9704-af45f2667923\" (UID: \"a564eb2f-252e-4969-9704-af45f2667923\") " Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.782885 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca" (OuterVolumeSpecName: "client-ca") pod "a564eb2f-252e-4969-9704-af45f2667923" (UID: "a564eb2f-252e-4969-9704-af45f2667923"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.783363 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a564eb2f-252e-4969-9704-af45f2667923" (UID: "a564eb2f-252e-4969-9704-af45f2667923"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.783482 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca" (OuterVolumeSpecName: "client-ca") pod "d1e28ae9-dd4c-4120-975f-5df7881b6880" (UID: "d1e28ae9-dd4c-4120-975f-5df7881b6880"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.783954 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config" (OuterVolumeSpecName: "config") pod "a564eb2f-252e-4969-9704-af45f2667923" (UID: "a564eb2f-252e-4969-9704-af45f2667923"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.784386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config" (OuterVolumeSpecName: "config") pod "d1e28ae9-dd4c-4120-975f-5df7881b6880" (UID: "d1e28ae9-dd4c-4120-975f-5df7881b6880"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.788673 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt" (OuterVolumeSpecName: "kube-api-access-bs6lt") pod "a564eb2f-252e-4969-9704-af45f2667923" (UID: "a564eb2f-252e-4969-9704-af45f2667923"). InnerVolumeSpecName "kube-api-access-bs6lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.788806 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a564eb2f-252e-4969-9704-af45f2667923" (UID: "a564eb2f-252e-4969-9704-af45f2667923"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.788905 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d1e28ae9-dd4c-4120-975f-5df7881b6880" (UID: "d1e28ae9-dd4c-4120-975f-5df7881b6880"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.788900 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b" (OuterVolumeSpecName: "kube-api-access-7sn4b") pod "d1e28ae9-dd4c-4120-975f-5df7881b6880" (UID: "d1e28ae9-dd4c-4120-975f-5df7881b6880"). InnerVolumeSpecName "kube-api-access-7sn4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883684 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a564eb2f-252e-4969-9704-af45f2667923-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883726 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883735 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1e28ae9-dd4c-4120-975f-5df7881b6880-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883744 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883754 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs6lt\" (UniqueName: \"kubernetes.io/projected/a564eb2f-252e-4969-9704-af45f2667923-kube-api-access-bs6lt\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883764 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1e28ae9-dd4c-4120-975f-5df7881b6880-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883772 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883780 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a564eb2f-252e-4969-9704-af45f2667923-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:01 crc kubenswrapper[4876]: I1215 06:55:01.883788 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sn4b\" (UniqueName: \"kubernetes.io/projected/d1e28ae9-dd4c-4120-975f-5df7881b6880-kube-api-access-7sn4b\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.069384 4876 generic.go:334] "Generic (PLEG): container finished" podID="d1e28ae9-dd4c-4120-975f-5df7881b6880" 
containerID="fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0" exitCode=0 Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.069494 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" event={"ID":"d1e28ae9-dd4c-4120-975f-5df7881b6880","Type":"ContainerDied","Data":"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0"} Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.069570 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" event={"ID":"d1e28ae9-dd4c-4120-975f-5df7881b6880","Type":"ContainerDied","Data":"10df5e4f08fb47868338f68b714d8ca7614647b1c2200baff487a875c96e99b1"} Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.069592 4876 scope.go:117] "RemoveContainer" containerID="fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.069822 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.073858 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" event={"ID":"a564eb2f-252e-4969-9704-af45f2667923","Type":"ContainerDied","Data":"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2"} Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.073903 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.083172 4876 generic.go:334] "Generic (PLEG): container finished" podID="a564eb2f-252e-4969-9704-af45f2667923" containerID="83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2" exitCode=0 Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.083668 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f65cbcb98-lh62l" event={"ID":"a564eb2f-252e-4969-9704-af45f2667923","Type":"ContainerDied","Data":"a44a44ff6c9dc6d63b4e305599498bb45737894497b7cbe93400509752e88638"} Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.089005 4876 scope.go:117] "RemoveContainer" containerID="fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0" Dec 15 06:55:02 crc kubenswrapper[4876]: E1215 06:55:02.089893 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0\": container with ID starting with fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0 not found: ID does not exist" containerID="fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.089922 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0"} err="failed to get container status \"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0\": rpc error: code = NotFound desc = could not find container \"fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0\": container with ID starting with fef8b2af02b54a1dcfc230b5b034333ff9461fe26b6ccd48d45ce49005400fb0 
not found: ID does not exist" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.089941 4876 scope.go:117] "RemoveContainer" containerID="83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.107006 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.111833 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6dbff8c7dc-mvjw9"] Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.120391 4876 scope.go:117] "RemoveContainer" containerID="83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.123982 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:55:02 crc kubenswrapper[4876]: E1215 06:55:02.124907 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2\": container with ID starting with 83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2 not found: ID does not exist" containerID="83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.124953 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2"} err="failed to get container status \"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2\": rpc error: code = NotFound desc = could not find container \"83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2\": container with ID starting with 83adc4e8e355faf8723400b3140f0b5db6d746c54189b327955e168eaf56a8e2 not found: ID does not exist" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.126823 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5f65cbcb98-lh62l"] Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.730364 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a564eb2f-252e-4969-9704-af45f2667923" path="/var/lib/kubelet/pods/a564eb2f-252e-4969-9704-af45f2667923/volumes" Dec 15 06:55:02 crc kubenswrapper[4876]: I1215 06:55:02.732341 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1e28ae9-dd4c-4120-975f-5df7881b6880" path="/var/lib/kubelet/pods/d1e28ae9-dd4c-4120-975f-5df7881b6880/volumes" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.067862 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068267 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="extract-utilities" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068292 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="extract-utilities" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068307 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="extract-content" Dec 15 06:55:03 crc 
kubenswrapper[4876]: I1215 06:55:03.068319 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="extract-content" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068329 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="extract-content" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068338 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="extract-content" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068350 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068357 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068374 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068382 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068396 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a564eb2f-252e-4969-9704-af45f2667923" containerName="controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068406 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a564eb2f-252e-4969-9704-af45f2667923" containerName="controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068415 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e28ae9-dd4c-4120-975f-5df7881b6880" containerName="route-controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068423 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e28ae9-dd4c-4120-975f-5df7881b6880" containerName="route-controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: E1215 06:55:03.068439 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="extract-utilities" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068447 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="extract-utilities" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068572 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbc129a2-a1bc-42b7-a431-55ca7eb91d23" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068590 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="95d0b586-aa73-44cc-8007-33b96ce899fa" containerName="registry-server" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068602 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1e28ae9-dd4c-4120-975f-5df7881b6880" containerName="route-controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.068614 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a564eb2f-252e-4969-9704-af45f2667923" containerName="controller-manager" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.069284 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.071371 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.071992 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.073042 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.074040 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.074356 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.076791 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077052 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077306 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077311 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077243 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077174 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077589 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.077931 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.080039 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.080551 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.088033 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.091315 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.091845 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bgm7t" 
podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="registry-server" containerID="cri-o://b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342" gracePeriod=2 Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205507 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205558 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205579 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205606 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205623 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205640 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205656 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lfpn\" (UniqueName: \"kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205698 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5s7p\" (UniqueName: 
\"kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.205747 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.309993 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310040 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310062 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310082 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lfpn\" (UniqueName: \"kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310146 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5s7p\" (UniqueName: \"kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310169 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310192 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: 
\"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310209 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.310229 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.311723 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.312889 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.313419 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.313980 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.314967 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.318517 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.318279 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.333382 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5s7p\" (UniqueName: \"kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p\") pod \"controller-manager-679646c95-f2k85\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.338867 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lfpn\" (UniqueName: \"kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn\") pod \"route-controller-manager-7f8dcb5f49-n9gwz\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.385215 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.396496 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.455017 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.527422 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x84vd\" (UniqueName: \"kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd\") pod \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.527574 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities\") pod \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.528427 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content\") pod \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\" (UID: \"9a81db39-a0c0-4be6-8e60-73fdf457b8b9\") " Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.530195 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities" (OuterVolumeSpecName: "utilities") pod "9a81db39-a0c0-4be6-8e60-73fdf457b8b9" (UID: "9a81db39-a0c0-4be6-8e60-73fdf457b8b9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.537992 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd" (OuterVolumeSpecName: "kube-api-access-x84vd") pod "9a81db39-a0c0-4be6-8e60-73fdf457b8b9" (UID: "9a81db39-a0c0-4be6-8e60-73fdf457b8b9"). InnerVolumeSpecName "kube-api-access-x84vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.630890 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.631698 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x84vd\" (UniqueName: \"kubernetes.io/projected/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-kube-api-access-x84vd\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.652259 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a81db39-a0c0-4be6-8e60-73fdf457b8b9" (UID: "9a81db39-a0c0-4be6-8e60-73fdf457b8b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.733081 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a81db39-a0c0-4be6-8e60-73fdf457b8b9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.891157 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:55:03 crc kubenswrapper[4876]: W1215 06:55:03.891642 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ce82667_04f0_42b0_b2db_95ab9f093073.slice/crio-240c4cf83267cfb3eaa72efe06237aab1eae345750a49b9537e5424d78fcfbe0 WatchSource:0}: Error finding container 240c4cf83267cfb3eaa72efe06237aab1eae345750a49b9537e5424d78fcfbe0: Status 404 returned error can't find the container with id 240c4cf83267cfb3eaa72efe06237aab1eae345750a49b9537e5424d78fcfbe0 Dec 15 06:55:03 crc kubenswrapper[4876]: I1215 06:55:03.949053 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:55:03 crc kubenswrapper[4876]: W1215 06:55:03.958090 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23cc6144_fc5d_4e33_8f94_888844fb6283.slice/crio-ca466426ce6265e03ceb0c0aa2a45e7d8bf0b3e25855e1d0da835bd2db6a7b80 WatchSource:0}: Error finding container ca466426ce6265e03ceb0c0aa2a45e7d8bf0b3e25855e1d0da835bd2db6a7b80: Status 404 returned error can't find the container with id ca466426ce6265e03ceb0c0aa2a45e7d8bf0b3e25855e1d0da835bd2db6a7b80 Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.101261 4876 generic.go:334] "Generic (PLEG): container finished" podID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerID="b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342" exitCode=0 Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 
06:55:04.101382 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerDied","Data":"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.101412 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bgm7t" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.101758 4876 scope.go:117] "RemoveContainer" containerID="b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.101716 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bgm7t" event={"ID":"9a81db39-a0c0-4be6-8e60-73fdf457b8b9","Type":"ContainerDied","Data":"1fc3b249ded6d4c9284351de17415c3608a189c6508d34da9de5f21ffe54b5ca"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.108489 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" event={"ID":"6ce82667-04f0-42b0-b2db-95ab9f093073","Type":"ContainerStarted","Data":"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.108554 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.108581 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" event={"ID":"6ce82667-04f0-42b0-b2db-95ab9f093073","Type":"ContainerStarted","Data":"240c4cf83267cfb3eaa72efe06237aab1eae345750a49b9537e5424d78fcfbe0"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.111240 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" event={"ID":"23cc6144-fc5d-4e33-8f94-888844fb6283","Type":"ContainerStarted","Data":"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.111311 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" event={"ID":"23cc6144-fc5d-4e33-8f94-888844fb6283","Type":"ContainerStarted","Data":"ca466426ce6265e03ceb0c0aa2a45e7d8bf0b3e25855e1d0da835bd2db6a7b80"} Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.111331 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.111321 4876 patch_prober.go:28] interesting pod/route-controller-manager-7f8dcb5f49-n9gwz container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection refused" start-of-body= Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.111387 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": dial tcp 10.217.0.62:8443: connect: connection 
refused" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.114133 4876 patch_prober.go:28] interesting pod/controller-manager-679646c95-f2k85 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.114189 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.117936 4876 scope.go:117] "RemoveContainer" containerID="afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.137324 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" podStartSLOduration=3.13729676 podStartE2EDuration="3.13729676s" podCreationTimestamp="2025-12-15 06:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:55:04.133295543 +0000 UTC m=+229.704438454" watchObservedRunningTime="2025-12-15 06:55:04.13729676 +0000 UTC m=+229.708439691" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.139016 4876 scope.go:117] "RemoveContainer" containerID="fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.160635 4876 scope.go:117] "RemoveContainer" containerID="b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.160713 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" podStartSLOduration=3.160695014 podStartE2EDuration="3.160695014s" podCreationTimestamp="2025-12-15 06:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:55:04.156287807 +0000 UTC m=+229.727430718" watchObservedRunningTime="2025-12-15 06:55:04.160695014 +0000 UTC m=+229.731837925" Dec 15 06:55:04 crc kubenswrapper[4876]: E1215 06:55:04.161825 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342\": container with ID starting with b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342 not found: ID does not exist" containerID="b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.161910 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342"} err="failed to get container status \"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342\": rpc error: code = NotFound desc = could not find container \"b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342\": container with ID starting with b2de6a0a7553060e8cf5b78e833a2653e1977413584dd7e01584ec58a50c4342 not found: ID does not 
exist" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.161969 4876 scope.go:117] "RemoveContainer" containerID="afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c" Dec 15 06:55:04 crc kubenswrapper[4876]: E1215 06:55:04.165672 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c\": container with ID starting with afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c not found: ID does not exist" containerID="afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.165705 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c"} err="failed to get container status \"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c\": rpc error: code = NotFound desc = could not find container \"afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c\": container with ID starting with afe015c115ae23c407a08269a9759b7bde5e407ac73e04c66acb346daa4df00c not found: ID does not exist" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.165745 4876 scope.go:117] "RemoveContainer" containerID="fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30" Dec 15 06:55:04 crc kubenswrapper[4876]: E1215 06:55:04.166324 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30\": container with ID starting with fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30 not found: ID does not exist" containerID="fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.166372 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30"} err="failed to get container status \"fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30\": rpc error: code = NotFound desc = could not find container \"fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30\": container with ID starting with fd56caf7c617d1a6de2f60fddd4f38d7d6cc75245f39dbccabef60d44aad3f30 not found: ID does not exist" Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.170300 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.176364 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bgm7t"] Dec 15 06:55:04 crc kubenswrapper[4876]: I1215 06:55:04.712617 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" path="/var/lib/kubelet/pods/9a81db39-a0c0-4be6-8e60-73fdf457b8b9/volumes" Dec 15 06:55:05 crc kubenswrapper[4876]: I1215 06:55:05.122723 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:55:05 crc kubenswrapper[4876]: I1215 06:55:05.123023 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 
06:55:14.960979 4876 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.962014 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="extract-content" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.962037 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="extract-content" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.962078 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="extract-utilities" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.962092 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="extract-utilities" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.962142 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="registry-server" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.962161 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="registry-server" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.962397 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a81db39-a0c0-4be6-8e60-73fdf457b8b9" containerName="registry-server" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.963164 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964233 4876 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964626 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df" gracePeriod=15 Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964756 4876 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964813 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f" gracePeriod=15 Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964867 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99" gracePeriod=15 Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964905 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" 
containerID="cri-o://54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37" gracePeriod=15 Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.964948 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47" gracePeriod=15 Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965026 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965044 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965067 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965083 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965154 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965175 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965196 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965214 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965239 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965256 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965275 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965293 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 15 06:55:14 crc kubenswrapper[4876]: E1215 06:55:14.965313 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965326 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965521 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965545 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965565 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965584 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965603 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 15 06:55:14 crc kubenswrapper[4876]: I1215 06:55:14.965622 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 15 06:55:15 crc kubenswrapper[4876]: E1215 06:55:15.035807 4876 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.70:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106648 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106695 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106720 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106780 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106806 
4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106825 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.106911 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.183589 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.185718 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.186546 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f" exitCode=0 Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.186578 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99" exitCode=0 Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.186590 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37" exitCode=0 Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.186596 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47" exitCode=2 Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.186680 4876 scope.go:117] "RemoveContainer" containerID="c4cb954d68ab2e7d13d2621042b78fd9c734e70449eb2035ced58176e77c2d4c" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.190082 4876 generic.go:334] "Generic (PLEG): container finished" podID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" containerID="5ab861f88420786f049e0614335adc2eb9257cd5174d2f161124ba6164a36d97" exitCode=0 Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.190193 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"39ca6f1d-cafe-43e5-9d15-5175a55a73c8","Type":"ContainerDied","Data":"5ab861f88420786f049e0614335adc2eb9257cd5174d2f161124ba6164a36d97"} Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.191306 4876 status_manager.go:851] "Failed to get status for pod" 
podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.191841 4876 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.208696 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.208770 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.208821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.208898 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.208921 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209001 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209175 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod 
\"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209247 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209346 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209387 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209397 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209432 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209443 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.209489 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: I1215 06:55:15.337387 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:15 crc kubenswrapper[4876]: E1215 06:55:15.374700 4876 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.70:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1881511d57bdab20 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-15 06:55:15.373894432 +0000 UTC m=+240.945037353,LastTimestamp:2025-12-15 06:55:15.373894432 +0000 UTC m=+240.945037353,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.206682 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.211300 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747"} Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.211396 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3c63c3dfb944ee74f7762389b47ea4fc0b6c99f3ce878f085c971ade96e50509"} Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.213341 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:16 crc kubenswrapper[4876]: E1215 06:55:16.213343 4876 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.70:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.214141 4876 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.664235 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.665592 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.666262 4876 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.837159 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock\") pod \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.837281 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir\") pod \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.837404 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access\") pod \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\" (UID: \"39ca6f1d-cafe-43e5-9d15-5175a55a73c8\") " Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.837488 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock" (OuterVolumeSpecName: "var-lock") pod "39ca6f1d-cafe-43e5-9d15-5175a55a73c8" (UID: "39ca6f1d-cafe-43e5-9d15-5175a55a73c8"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.837501 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "39ca6f1d-cafe-43e5-9d15-5175a55a73c8" (UID: "39ca6f1d-cafe-43e5-9d15-5175a55a73c8"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.838343 4876 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-var-lock\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.838366 4876 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.844000 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "39ca6f1d-cafe-43e5-9d15-5175a55a73c8" (UID: "39ca6f1d-cafe-43e5-9d15-5175a55a73c8"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:55:16 crc kubenswrapper[4876]: I1215 06:55:16.939402 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/39ca6f1d-cafe-43e5-9d15-5175a55a73c8-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.220313 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"39ca6f1d-cafe-43e5-9d15-5175a55a73c8","Type":"ContainerDied","Data":"125fb0e49eee0ae112edc6593dde50f7d0311cc630d137609a93a7f410996949"} Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.220842 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="125fb0e49eee0ae112edc6593dde50f7d0311cc630d137609a93a7f410996949" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.220434 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.258366 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.341733 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.342736 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.343358 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.343822 4876 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451271 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451361 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451434 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451546 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451592 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.451994 4876 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.452032 4876 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.452050 4876 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.813437 4876 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.814091 4876 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.814811 4876 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.815384 4876 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.815837 4876 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:17 crc kubenswrapper[4876]: I1215 06:55:17.815885 4876 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 15 06:55:17 crc kubenswrapper[4876]: E1215 06:55:17.816490 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="200ms" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.017813 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="400ms" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.229152 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.230346 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df" exitCode=0 Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.230419 4876 scope.go:117] "RemoveContainer" containerID="8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.230434 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.247536 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.247871 4876 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.252266 4876 scope.go:117] "RemoveContainer" containerID="501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.267119 4876 scope.go:117] "RemoveContainer" containerID="54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.279574 4876 scope.go:117] "RemoveContainer" containerID="8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.293927 4876 scope.go:117] "RemoveContainer" containerID="7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.309523 4876 scope.go:117] "RemoveContainer" containerID="179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.338325 4876 scope.go:117] "RemoveContainer" containerID="8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.339157 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\": container with ID starting with 8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f not found: ID does not exist" containerID="8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.339233 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f"} err="failed to get container status \"8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\": rpc error: code = NotFound desc = could not find container \"8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f\": container with ID starting with 8ac49a879e33192870ec909599e6a245e7cc52a0b4648a7326cf24ba9076908f not found: ID does not exist" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.339287 4876 scope.go:117] "RemoveContainer" containerID="501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99" Dec 15 
06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.339584 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\": container with ID starting with 501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99 not found: ID does not exist" containerID="501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.339615 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99"} err="failed to get container status \"501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\": rpc error: code = NotFound desc = could not find container \"501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99\": container with ID starting with 501210e53d025ee21c9d7ec3e542d650516e36ac6000fd8c5504cc5b0ced2a99 not found: ID does not exist" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.339656 4876 scope.go:117] "RemoveContainer" containerID="54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.340024 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\": container with ID starting with 54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37 not found: ID does not exist" containerID="54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.340094 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37"} err="failed to get container status \"54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\": rpc error: code = NotFound desc = could not find container \"54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37\": container with ID starting with 54b2774630c8277d08cb49e31366bd334b9674ea0a68d774fb90399a86143d37 not found: ID does not exist" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.340138 4876 scope.go:117] "RemoveContainer" containerID="8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.340459 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\": container with ID starting with 8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47 not found: ID does not exist" containerID="8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.340492 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47"} err="failed to get container status \"8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\": rpc error: code = NotFound desc = could not find container \"8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47\": container with ID starting with 8e5495b751d517abba95185232a4e42b161e7b46b621338d097d7f227d9cac47 not found: ID does not exist" Dec 15 06:55:18 crc 
kubenswrapper[4876]: I1215 06:55:18.340510 4876 scope.go:117] "RemoveContainer" containerID="7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.341413 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\": container with ID starting with 7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df not found: ID does not exist" containerID="7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.341454 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df"} err="failed to get container status \"7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\": rpc error: code = NotFound desc = could not find container \"7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df\": container with ID starting with 7f250abe1afbacd59c2302eca81be07757a0ff336804871d1c8370f0d3be77df not found: ID does not exist" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.341515 4876 scope.go:117] "RemoveContainer" containerID="179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.341882 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\": container with ID starting with 179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2 not found: ID does not exist" containerID="179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.341907 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2"} err="failed to get container status \"179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\": rpc error: code = NotFound desc = could not find container \"179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2\": container with ID starting with 179268dbbec59fe7b835b7e1cbb3cbc1946c7e264934b9c6cc3cb62a4b74d4e2 not found: ID does not exist" Dec 15 06:55:18 crc kubenswrapper[4876]: E1215 06:55:18.418482 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="800ms" Dec 15 06:55:18 crc kubenswrapper[4876]: I1215 06:55:18.718056 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 15 06:55:19 crc kubenswrapper[4876]: E1215 06:55:19.220300 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="1.6s" Dec 15 06:55:20 crc kubenswrapper[4876]: E1215 06:55:20.821625 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="3.2s" Dec 15 06:55:23 crc kubenswrapper[4876]: I1215 06:55:23.466284 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerName="oauth-openshift" containerID="cri-o://5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1" gracePeriod=15 Dec 15 06:55:24 crc kubenswrapper[4876]: E1215 06:55:24.023512 4876 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.70:6443: connect: connection refused" interval="6.4s" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.078654 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.079326 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.079768 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247225 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247289 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247369 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247407 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bh8lr\" (UniqueName: \"kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247447 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247480 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247523 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247565 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247606 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248074 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.247838 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248142 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248182 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248214 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248250 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session\") pod \"e6f138ff-658f-41f4-8067-72f1882a25a5\" (UID: \"e6f138ff-658f-41f4-8067-72f1882a25a5\") " Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.248621 4876 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.249211 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.249343 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.250395 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.250670 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.255073 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.257481 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.258620 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.259366 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.262747 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.263569 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.263751 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.263993 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.266642 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr" (OuterVolumeSpecName: "kube-api-access-bh8lr") pod "e6f138ff-658f-41f4-8067-72f1882a25a5" (UID: "e6f138ff-658f-41f4-8067-72f1882a25a5"). InnerVolumeSpecName "kube-api-access-bh8lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.295741 4876 generic.go:334] "Generic (PLEG): container finished" podID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerID="5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1" exitCode=0 Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.295792 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" event={"ID":"e6f138ff-658f-41f4-8067-72f1882a25a5","Type":"ContainerDied","Data":"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1"} Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.295848 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" event={"ID":"e6f138ff-658f-41f4-8067-72f1882a25a5","Type":"ContainerDied","Data":"959659033ceac615e7b79e3e45a2eef5387be62a17119c49ca761ec77c60b571"} Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.295845 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.295866 4876 scope.go:117] "RemoveContainer" containerID="5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.297886 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.299751 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.322676 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.323355 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.331346 4876 scope.go:117] "RemoveContainer" containerID="5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1" Dec 15 06:55:24 crc kubenswrapper[4876]: E1215 06:55:24.331931 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1\": container with ID starting with 5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1 not found: ID does not exist" containerID="5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.331986 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1"} err="failed to get container status \"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1\": rpc error: code = NotFound desc = could not find container \"5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1\": container with ID starting with 5c29876dc280120cd45a3e710aaaad729c5f6fee5f54860910bceef614e536b1 not found: ID does not exist" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349607 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349670 4876 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349698 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349723 4876 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349744 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349767 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349786 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349805 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bh8lr\" (UniqueName: \"kubernetes.io/projected/e6f138ff-658f-41f4-8067-72f1882a25a5-kube-api-access-bh8lr\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349827 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349847 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349868 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349890 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.349909 4876 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e6f138ff-658f-41f4-8067-72f1882a25a5-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:55:24 crc kubenswrapper[4876]: E1215 06:55:24.648612 4876 event.go:368] "Unable 
to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.70:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.1881511d57bdab20 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-15 06:55:15.373894432 +0000 UTC m=+240.945037353,LastTimestamp:2025-12-15 06:55:15.373894432 +0000 UTC m=+240.945037353,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.711284 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:24 crc kubenswrapper[4876]: I1215 06:55:24.711680 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.705242 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.706799 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.707326 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.731778 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.731825 4876 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:26 crc kubenswrapper[4876]: E1215 06:55:26.732318 4876 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:26 crc kubenswrapper[4876]: I1215 06:55:26.733138 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:26 crc kubenswrapper[4876]: W1215 06:55:26.766763 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-bd7195a7b62c85e7ba8743d7022d0a8e30c55d2ba1a7cf7d8919763fcb2b62c2 WatchSource:0}: Error finding container bd7195a7b62c85e7ba8743d7022d0a8e30c55d2ba1a7cf7d8919763fcb2b62c2: Status 404 returned error can't find the container with id bd7195a7b62c85e7ba8743d7022d0a8e30c55d2ba1a7cf7d8919763fcb2b62c2 Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.319933 4876 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="d37b73320b6bb7c0c372b9259689776ed6a6f9ef3be183ffdba6470e8b9a309c" exitCode=0 Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.320338 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"d37b73320b6bb7c0c372b9259689776ed6a6f9ef3be183ffdba6470e8b9a309c"} Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.320368 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bd7195a7b62c85e7ba8743d7022d0a8e30c55d2ba1a7cf7d8919763fcb2b62c2"} Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.320615 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.320627 4876 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:27 crc kubenswrapper[4876]: E1215 06:55:27.321413 4876 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.321441 4876 status_manager.go:851] "Failed to get status for pod" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:27 crc kubenswrapper[4876]: I1215 06:55:27.321898 4876 status_manager.go:851] "Failed to get status for pod" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" pod="openshift-authentication/oauth-openshift-558db77b4-bkzhk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-bkzhk\": dial tcp 38.102.83.70:6443: connect: connection refused" Dec 15 06:55:28 crc kubenswrapper[4876]: I1215 06:55:28.337081 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"55fe6908d1ee72147e0e123993cac1feeb95d02a661dd3d9d4cdaf80e1631dc4"} Dec 15 06:55:28 crc kubenswrapper[4876]: I1215 06:55:28.337431 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"64fa88063d690c0eaeea3bfc7885afc90f6f6a2f9664c3f0aef11deb4febf694"} Dec 15 06:55:28 crc kubenswrapper[4876]: I1215 06:55:28.337443 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"52e472be1ced86c96fe9daef66a9c711e4e599890686c5e985a54a0a77302762"} Dec 15 06:55:29 crc kubenswrapper[4876]: I1215 06:55:29.345244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cc50c99233c719325610b938766f5e31829c21249e668a9ddcbfec91cc3b0340"} Dec 15 06:55:29 crc kubenswrapper[4876]: I1215 06:55:29.345294 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e71c5860d44e92b3a673364df102187a190335714dfe131adb7cd48ca0d53b74"} Dec 15 06:55:29 crc kubenswrapper[4876]: I1215 06:55:29.345421 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:29 crc kubenswrapper[4876]: I1215 06:55:29.345514 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:29 crc kubenswrapper[4876]: I1215 06:55:29.345530 4876 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:30 crc kubenswrapper[4876]: I1215 06:55:30.356152 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 15 06:55:30 crc kubenswrapper[4876]: I1215 06:55:30.356209 4876 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f" exitCode=1 Dec 15 06:55:30 crc kubenswrapper[4876]: I1215 06:55:30.356239 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f"} Dec 15 06:55:30 crc kubenswrapper[4876]: I1215 06:55:30.356676 4876 scope.go:117] "RemoveContainer" containerID="c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f" Dec 15 06:55:31 crc kubenswrapper[4876]: I1215 06:55:31.368848 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 15 06:55:31 crc kubenswrapper[4876]: I1215 06:55:31.369257 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c7eff344c31eebf8f9b112cae5446f1ccca88bf9fe7b8428d4941d69fbaf4d53"} Dec 15 06:55:31 crc kubenswrapper[4876]: I1215 06:55:31.734341 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:31 crc kubenswrapper[4876]: I1215 06:55:31.734418 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:31 crc kubenswrapper[4876]: I1215 06:55:31.742017 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.256566 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.256863 4876 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.256935 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.355987 4876 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.393160 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.393199 4876 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.398608 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:34 crc kubenswrapper[4876]: I1215 06:55:34.716024 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e03addb9-1a11-4ab5-a66a-bafac081e7af" Dec 15 06:55:35 crc kubenswrapper[4876]: I1215 06:55:35.399744 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:35 crc kubenswrapper[4876]: I1215 06:55:35.399815 4876 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:35 crc kubenswrapper[4876]: I1215 06:55:35.404184 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e03addb9-1a11-4ab5-a66a-bafac081e7af" Dec 15 06:55:36 crc kubenswrapper[4876]: I1215 06:55:36.973877 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:55:42 crc kubenswrapper[4876]: I1215 06:55:42.812899 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 15 06:55:44 crc kubenswrapper[4876]: I1215 06:55:44.170030 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 15 06:55:44 crc kubenswrapper[4876]: I1215 06:55:44.256564 4876 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 15 06:55:44 crc kubenswrapper[4876]: I1215 06:55:44.256647 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 15 06:55:44 crc kubenswrapper[4876]: I1215 06:55:44.819917 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 15 06:55:45 crc kubenswrapper[4876]: I1215 06:55:45.165415 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 15 06:55:45 crc kubenswrapper[4876]: I1215 06:55:45.429769 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 15 06:55:45 crc kubenswrapper[4876]: I1215 06:55:45.679194 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 15 06:55:45 crc kubenswrapper[4876]: I1215 06:55:45.970006 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 15 06:55:46 
crc kubenswrapper[4876]: I1215 06:55:46.050948 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 15 06:55:46 crc kubenswrapper[4876]: I1215 06:55:46.225062 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 15 06:55:46 crc kubenswrapper[4876]: I1215 06:55:46.758969 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 15 06:55:47 crc kubenswrapper[4876]: I1215 06:55:47.518839 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 15 06:55:47 crc kubenswrapper[4876]: I1215 06:55:47.687036 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.031590 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.048987 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.066770 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.112035 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.144509 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.383685 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.432558 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.515784 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.553206 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.630437 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.681936 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.786520 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.796510 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.823791 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 15 06:55:48 crc 
kubenswrapper[4876]: I1215 06:55:48.879892 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 15 06:55:48 crc kubenswrapper[4876]: I1215 06:55:48.944952 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.114886 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.170535 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.188843 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.188865 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.248991 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.407531 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.451195 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.492981 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.542868 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.550932 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.604144 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.662227 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.681676 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 15 06:55:49 crc kubenswrapper[4876]: I1215 06:55:49.890938 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.093463 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.131528 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.265187 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 
06:55:50.484564 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.501869 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.511289 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.537433 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.573450 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.595842 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.769964 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.790204 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.797526 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.810438 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.845845 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.856676 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.875480 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.881435 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.950561 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 15 06:55:50 crc kubenswrapper[4876]: I1215 06:55:50.968054 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.037588 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.059955 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.067362 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.082357 
4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.085193 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.168960 4876 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.185842 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.191378 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.207473 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.262083 4876 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.464741 4876 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.526535 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.753639 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.764974 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.793761 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.808889 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.824944 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.938014 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 15 06:55:51 crc kubenswrapper[4876]: I1215 06:55:51.980411 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.005868 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.017563 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.026515 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.035844 4876 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.112939 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.174317 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.226885 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.289950 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.340950 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.512602 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.532436 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.607574 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.663455 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.721865 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.735955 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.786481 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.826100 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.881849 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.943888 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 15 06:55:52 crc kubenswrapper[4876]: I1215 06:55:52.981336 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.003525 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.059328 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 15 06:55:53 crc 
kubenswrapper[4876]: I1215 06:55:53.092832 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.150386 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.294934 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.343280 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.495991 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.517428 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.670575 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.697137 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.722685 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.808590 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.820020 4876 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.827143 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.879537 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.899365 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.915864 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 15 06:55:53 crc kubenswrapper[4876]: I1215 06:55:53.962568 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.034633 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.065628 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.084248 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: 
I1215 06:55:54.152659 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.212578 4876 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.216716 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-bkzhk"] Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.216774 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-5b9d67559d-sxz7j"] Dec 15 06:55:54 crc kubenswrapper[4876]: E1215 06:55:54.216949 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" containerName="installer" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.216969 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" containerName="installer" Dec 15 06:55:54 crc kubenswrapper[4876]: E1215 06:55:54.216980 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerName="oauth-openshift" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.216988 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerName="oauth-openshift" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.217124 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ca6f1d-cafe-43e5-9d15-5175a55a73c8" containerName="installer" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.217135 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" containerName="oauth-openshift" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.217500 4876 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.217549 4876 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f3a728ba-8b04-4c1d-8c0a-0518c330f7b5" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.217594 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.219700 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.219863 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220187 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220218 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220220 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220646 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220703 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.220718 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.221215 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.222364 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.222694 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.223572 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.226128 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.240959 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.242550 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.246587 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=20.246566714 podStartE2EDuration="20.246566714s" podCreationTimestamp="2025-12-15 06:55:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:55:54.241821417 +0000 UTC m=+279.812964338" watchObservedRunningTime="2025-12-15 06:55:54.246566714 +0000 UTC m=+279.817709635" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 
06:55:54.253270 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.257698 4876 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.257767 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.257831 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.258810 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"c7eff344c31eebf8f9b112cae5446f1ccca88bf9fe7b8428d4941d69fbaf4d53"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.259058 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://c7eff344c31eebf8f9b112cae5446f1ccca88bf9fe7b8428d4941d69fbaf4d53" gracePeriod=30 Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263642 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-service-ca\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263688 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-error\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263751 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263784 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-router-certs\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263839 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-login\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263859 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-session\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263878 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263895 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263915 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-policies\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263930 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4gsd\" (UniqueName: \"kubernetes.io/projected/a68e78a1-4811-4860-957c-905b59e6fdfd-kube-api-access-z4gsd\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263951 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.263981 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-dir\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.264204 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.264452 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.323488 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.365383 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-dir\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.365739 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.366006 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-service-ca\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.366256 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-error\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.366516 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.365549 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-dir\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.366708 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-router-certs\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.366972 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367017 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-login\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367045 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-session\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367078 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367124 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367152 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" 
(UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-policies\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367178 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4gsd\" (UniqueName: \"kubernetes.io/projected/a68e78a1-4811-4860-957c-905b59e6fdfd-kube-api-access-z4gsd\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367218 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367371 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367716 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-service-ca\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367732 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.367916 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a68e78a1-4811-4860-957c-905b59e6fdfd-audit-policies\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.372015 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.373045 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-error\") 
pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.373217 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.373622 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-router-certs\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.374791 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-login\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.377506 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-session\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.377857 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.377919 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a68e78a1-4811-4860-957c-905b59e6fdfd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.384999 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4gsd\" (UniqueName: \"kubernetes.io/projected/a68e78a1-4811-4860-957c-905b59e6fdfd-kube-api-access-z4gsd\") pod \"oauth-openshift-5b9d67559d-sxz7j\" (UID: \"a68e78a1-4811-4860-957c-905b59e6fdfd\") " pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.552425 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.636246 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.711275 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.711955 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6f138ff-658f-41f4-8067-72f1882a25a5" path="/var/lib/kubelet/pods/e6f138ff-658f-41f4-8067-72f1882a25a5/volumes" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.769489 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 15 06:55:54 crc kubenswrapper[4876]: I1215 06:55:54.898188 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.002158 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.030745 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.248266 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.263327 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.308007 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.421932 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.464703 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.481896 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.509213 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.528290 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.555326 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.562309 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.705488 4876 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.765333 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.868978 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.878624 4876 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 15 06:55:55 crc kubenswrapper[4876]: I1215 06:55:55.967814 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.060613 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.072293 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.109724 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.179725 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.192457 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.199767 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.208289 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.231004 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.304206 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.370927 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.483558 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.501452 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.544162 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.566637 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.598055 4876 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.639364 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.746931 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.769829 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.780139 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.819611 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.837301 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.843077 4876 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.843452 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747" gracePeriod=5 Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.930738 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 15 06:55:56 crc kubenswrapper[4876]: I1215 06:55:56.932322 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.025330 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.047181 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.100654 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.105185 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.106480 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.120495 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.200874 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.262181 4876 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.330627 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.426724 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.431277 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.464881 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.484717 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.557545 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.562219 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.706719 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.731113 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.774509 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 15 06:55:57 crc kubenswrapper[4876]: E1215 06:55:57.791250 4876 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 15 06:55:57 crc kubenswrapper[4876]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-5b9d67559d-sxz7j_openshift-authentication_a68e78a1-4811-4860-957c-905b59e6fdfd_0(1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e): error adding pod openshift-authentication_oauth-openshift-5b9d67559d-sxz7j to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e" Netns:"/var/run/netns/336f45ab-880b-48da-a003-02c59590cebe" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-5b9d67559d-sxz7j;K8S_POD_INFRA_CONTAINER_ID=1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e;K8S_POD_UID=a68e78a1-4811-4860-957c-905b59e6fdfd" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j] networking: Multus: [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j/a68e78a1-4811-4860-957c-905b59e6fdfd]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-5b9d67559d-sxz7j in out of cluster comm: pod "oauth-openshift-5b9d67559d-sxz7j" not found Dec 15 06:55:57 crc kubenswrapper[4876]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 15 06:55:57 crc kubenswrapper[4876]: > Dec 15 06:55:57 crc kubenswrapper[4876]: E1215 06:55:57.791653 4876 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 15 06:55:57 crc kubenswrapper[4876]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-5b9d67559d-sxz7j_openshift-authentication_a68e78a1-4811-4860-957c-905b59e6fdfd_0(1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e): error adding pod openshift-authentication_oauth-openshift-5b9d67559d-sxz7j to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e" Netns:"/var/run/netns/336f45ab-880b-48da-a003-02c59590cebe" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-5b9d67559d-sxz7j;K8S_POD_INFRA_CONTAINER_ID=1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e;K8S_POD_UID=a68e78a1-4811-4860-957c-905b59e6fdfd" Path:"" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j] networking: Multus: [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j/a68e78a1-4811-4860-957c-905b59e6fdfd]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-5b9d67559d-sxz7j in out of cluster comm: pod "oauth-openshift-5b9d67559d-sxz7j" not found Dec 15 06:55:57 crc kubenswrapper[4876]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 15 06:55:57 crc kubenswrapper[4876]: > pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:57 crc kubenswrapper[4876]: E1215 06:55:57.791674 4876 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 15 06:55:57 crc kubenswrapper[4876]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-5b9d67559d-sxz7j_openshift-authentication_a68e78a1-4811-4860-957c-905b59e6fdfd_0(1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e): error adding pod openshift-authentication_oauth-openshift-5b9d67559d-sxz7j to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e" Netns:"/var/run/netns/336f45ab-880b-48da-a003-02c59590cebe" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-5b9d67559d-sxz7j;K8S_POD_INFRA_CONTAINER_ID=1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e;K8S_POD_UID=a68e78a1-4811-4860-957c-905b59e6fdfd" Path:"" ERRORED: error configuring pod 
[openshift-authentication/oauth-openshift-5b9d67559d-sxz7j] networking: Multus: [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j/a68e78a1-4811-4860-957c-905b59e6fdfd]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-5b9d67559d-sxz7j in out of cluster comm: pod "oauth-openshift-5b9d67559d-sxz7j" not found Dec 15 06:55:57 crc kubenswrapper[4876]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 15 06:55:57 crc kubenswrapper[4876]: > pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:55:57 crc kubenswrapper[4876]: E1215 06:55:57.791723 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"oauth-openshift-5b9d67559d-sxz7j_openshift-authentication(a68e78a1-4811-4860-957c-905b59e6fdfd)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"oauth-openshift-5b9d67559d-sxz7j_openshift-authentication(a68e78a1-4811-4860-957c-905b59e6fdfd)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_oauth-openshift-5b9d67559d-sxz7j_openshift-authentication_a68e78a1-4811-4860-957c-905b59e6fdfd_0(1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e): error adding pod openshift-authentication_oauth-openshift-5b9d67559d-sxz7j to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e\\\" Netns:\\\"/var/run/netns/336f45ab-880b-48da-a003-02c59590cebe\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-authentication;K8S_POD_NAME=oauth-openshift-5b9d67559d-sxz7j;K8S_POD_INFRA_CONTAINER_ID=1fa026467c5367ef87b875643ce144806237fc8dfb2c4e9df4a8f09d794dfe6e;K8S_POD_UID=a68e78a1-4811-4860-957c-905b59e6fdfd\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j] networking: Multus: [openshift-authentication/oauth-openshift-5b9d67559d-sxz7j/a68e78a1-4811-4860-957c-905b59e6fdfd]: error setting the networks status, pod was already deleted: SetPodNetworkStatusAnnotation: failed to query the pod oauth-openshift-5b9d67559d-sxz7j in out of cluster comm: pod \\\"oauth-openshift-5b9d67559d-sxz7j\\\" not found\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" podUID="a68e78a1-4811-4860-957c-905b59e6fdfd" Dec 15 06:55:57 crc kubenswrapper[4876]: I1215 06:55:57.998830 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 
06:55:58.099476 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.101218 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.218018 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.493641 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.517882 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.632276 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.680977 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.700659 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.701938 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.729723 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.764217 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.767237 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.783544 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 15 06:55:58 crc kubenswrapper[4876]: I1215 06:55:58.884998 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.433482 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.443430 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.476390 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.530246 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.667303 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.812765 4876 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.825382 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.847924 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.891880 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 15 06:55:59 crc kubenswrapper[4876]: I1215 06:55:59.986626 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.134375 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.171841 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.292196 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.356385 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.405695 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.421645 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.443411 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.680748 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.686386 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.695687 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.730786 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.731603 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.812659 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 15 06:56:00 crc kubenswrapper[4876]: I1215 06:56:00.997882 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 15 06:56:01 crc kubenswrapper[4876]: I1215 06:56:01.410962 4876 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns"/"dns-default" Dec 15 06:56:01 crc kubenswrapper[4876]: I1215 06:56:01.414687 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 15 06:56:01 crc kubenswrapper[4876]: I1215 06:56:01.519742 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 15 06:56:01 crc kubenswrapper[4876]: I1215 06:56:01.840499 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 15 06:56:01 crc kubenswrapper[4876]: I1215 06:56:01.968023 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.417405 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.417504 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.491910 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492067 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492146 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492241 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492282 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492145 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492200 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492635 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.492726 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.493055 4876 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.493100 4876 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.493162 4876 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.493184 4876 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.502161 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.558334 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.589498 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.589566 4876 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747" exitCode=137 Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.589620 4876 scope.go:117] "RemoveContainer" containerID="2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.589740 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.595044 4876 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.620695 4876 scope.go:117] "RemoveContainer" containerID="2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747" Dec 15 06:56:02 crc kubenswrapper[4876]: E1215 06:56:02.621447 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747\": container with ID starting with 2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747 not found: ID does not exist" containerID="2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.621511 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747"} err="failed to get container status \"2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747\": rpc error: code = NotFound desc = could not find container \"2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747\": container with ID starting with 2fefd64dbb07ae477e3aba93026cf7c035bbf6e4197b0d6b26aaec5231100747 not found: ID does not exist" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.680535 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.717355 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 15 06:56:02 crc kubenswrapper[4876]: I1215 06:56:02.893224 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 15 06:56:03 crc kubenswrapper[4876]: I1215 06:56:03.065730 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 15 06:56:03 crc 
kubenswrapper[4876]: I1215 06:56:03.571614 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 15 06:56:03 crc kubenswrapper[4876]: I1215 06:56:03.799760 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 15 06:56:08 crc kubenswrapper[4876]: I1215 06:56:08.705226 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:56:08 crc kubenswrapper[4876]: I1215 06:56:08.705603 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.150005 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5b9d67559d-sxz7j"] Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.642765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" event={"ID":"a68e78a1-4811-4860-957c-905b59e6fdfd","Type":"ContainerStarted","Data":"b4a915d9ee565d92d3fb5d803343446f37b0d667add7dbcec96c8777b8d16c0b"} Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.643362 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" event={"ID":"a68e78a1-4811-4860-957c-905b59e6fdfd","Type":"ContainerStarted","Data":"77bcae567f38b2cc4a0d641eb95ea9400f015ad5f08a8cafc3d6ff3f300311a0"} Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.643389 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.666166 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" podStartSLOduration=71.666141647 podStartE2EDuration="1m11.666141647s" podCreationTimestamp="2025-12-15 06:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:56:09.660869615 +0000 UTC m=+295.232012526" watchObservedRunningTime="2025-12-15 06:56:09.666141647 +0000 UTC m=+295.237284558" Dec 15 06:56:09 crc kubenswrapper[4876]: I1215 06:56:09.956414 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5b9d67559d-sxz7j" Dec 15 06:56:18 crc kubenswrapper[4876]: I1215 06:56:18.719902 4876 generic.go:334] "Generic (PLEG): container finished" podID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerID="868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa" exitCode=0 Dec 15 06:56:18 crc kubenswrapper[4876]: I1215 06:56:18.720098 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerDied","Data":"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa"} Dec 15 06:56:18 crc kubenswrapper[4876]: I1215 06:56:18.721578 4876 scope.go:117] "RemoveContainer" containerID="868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa" Dec 15 06:56:19 crc kubenswrapper[4876]: I1215 06:56:19.727693 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerStarted","Data":"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf"} Dec 15 06:56:19 crc kubenswrapper[4876]: I1215 06:56:19.729302 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:56:19 crc kubenswrapper[4876]: I1215 06:56:19.732775 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:56:24 crc kubenswrapper[4876]: I1215 06:56:24.770839 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 15 06:56:24 crc kubenswrapper[4876]: I1215 06:56:24.773879 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 15 06:56:24 crc kubenswrapper[4876]: I1215 06:56:24.773969 4876 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="c7eff344c31eebf8f9b112cae5446f1ccca88bf9fe7b8428d4941d69fbaf4d53" exitCode=137 Dec 15 06:56:24 crc kubenswrapper[4876]: I1215 06:56:24.774019 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"c7eff344c31eebf8f9b112cae5446f1ccca88bf9fe7b8428d4941d69fbaf4d53"} Dec 15 06:56:24 crc kubenswrapper[4876]: I1215 06:56:24.774070 4876 scope.go:117] "RemoveContainer" containerID="c0efbd72f000afde14d7ae88687b155ebc2cc9e179d1e5dfa4aaa67c12e0f28f" Dec 15 06:56:25 crc kubenswrapper[4876]: I1215 06:56:25.783038 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 15 06:56:25 crc kubenswrapper[4876]: I1215 06:56:25.785294 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ff79310e1283d36d8f0b0fb3521cbed27552e589091b02221f6cb8b763d025b5"} Dec 15 06:56:26 crc kubenswrapper[4876]: I1215 06:56:26.973342 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:56:34 crc kubenswrapper[4876]: I1215 06:56:34.256810 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:56:34 crc kubenswrapper[4876]: I1215 06:56:34.264037 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:56:36 crc kubenswrapper[4876]: I1215 06:56:36.979953 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.133088 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.133978 4876 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" containerID="cri-o://bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808" gracePeriod=30 Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.137843 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.138134 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" containerID="cri-o://545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d" gracePeriod=30 Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.385712 4876 patch_prober.go:28] interesting pod/controller-manager-679646c95-f2k85 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.386150 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.618417 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.624438 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.724937 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca\") pod \"6ce82667-04f0-42b0-b2db-95ab9f093073\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725522 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert\") pod \"23cc6144-fc5d-4e33-8f94-888844fb6283\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725582 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config\") pod \"23cc6144-fc5d-4e33-8f94-888844fb6283\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725623 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lfpn\" (UniqueName: \"kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn\") pod \"6ce82667-04f0-42b0-b2db-95ab9f093073\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725676 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert\") pod \"6ce82667-04f0-42b0-b2db-95ab9f093073\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725730 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config\") pod \"6ce82667-04f0-42b0-b2db-95ab9f093073\" (UID: \"6ce82667-04f0-42b0-b2db-95ab9f093073\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725778 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca\") pod \"23cc6144-fc5d-4e33-8f94-888844fb6283\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725808 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles\") pod \"23cc6144-fc5d-4e33-8f94-888844fb6283\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.725907 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5s7p\" (UniqueName: \"kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p\") pod \"23cc6144-fc5d-4e33-8f94-888844fb6283\" (UID: \"23cc6144-fc5d-4e33-8f94-888844fb6283\") " Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.726313 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config" (OuterVolumeSpecName: "config") pod "23cc6144-fc5d-4e33-8f94-888844fb6283" (UID: 
"23cc6144-fc5d-4e33-8f94-888844fb6283"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.726454 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config" (OuterVolumeSpecName: "config") pod "6ce82667-04f0-42b0-b2db-95ab9f093073" (UID: "6ce82667-04f0-42b0-b2db-95ab9f093073"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.726621 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca" (OuterVolumeSpecName: "client-ca") pod "23cc6144-fc5d-4e33-8f94-888844fb6283" (UID: "23cc6144-fc5d-4e33-8f94-888844fb6283"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.726719 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca" (OuterVolumeSpecName: "client-ca") pod "6ce82667-04f0-42b0-b2db-95ab9f093073" (UID: "6ce82667-04f0-42b0-b2db-95ab9f093073"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.726817 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "23cc6144-fc5d-4e33-8f94-888844fb6283" (UID: "23cc6144-fc5d-4e33-8f94-888844fb6283"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.732444 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "23cc6144-fc5d-4e33-8f94-888844fb6283" (UID: "23cc6144-fc5d-4e33-8f94-888844fb6283"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.733335 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p" (OuterVolumeSpecName: "kube-api-access-n5s7p") pod "23cc6144-fc5d-4e33-8f94-888844fb6283" (UID: "23cc6144-fc5d-4e33-8f94-888844fb6283"). InnerVolumeSpecName "kube-api-access-n5s7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.734905 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6ce82667-04f0-42b0-b2db-95ab9f093073" (UID: "6ce82667-04f0-42b0-b2db-95ab9f093073"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.734915 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn" (OuterVolumeSpecName: "kube-api-access-2lfpn") pod "6ce82667-04f0-42b0-b2db-95ab9f093073" (UID: "6ce82667-04f0-42b0-b2db-95ab9f093073"). 
InnerVolumeSpecName "kube-api-access-2lfpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827339 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827373 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23cc6144-fc5d-4e33-8f94-888844fb6283-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827383 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827393 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lfpn\" (UniqueName: \"kubernetes.io/projected/6ce82667-04f0-42b0-b2db-95ab9f093073-kube-api-access-2lfpn\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827403 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ce82667-04f0-42b0-b2db-95ab9f093073-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827411 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ce82667-04f0-42b0-b2db-95ab9f093073-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827420 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827428 4876 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/23cc6144-fc5d-4e33-8f94-888844fb6283-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.827437 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5s7p\" (UniqueName: \"kubernetes.io/projected/23cc6144-fc5d-4e33-8f94-888844fb6283-kube-api-access-n5s7p\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.899985 4876 generic.go:334] "Generic (PLEG): container finished" podID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerID="545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d" exitCode=0 Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.900079 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" event={"ID":"6ce82667-04f0-42b0-b2db-95ab9f093073","Type":"ContainerDied","Data":"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d"} Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.900095 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.900882 4876 scope.go:117] "RemoveContainer" containerID="545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.900842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" event={"ID":"6ce82667-04f0-42b0-b2db-95ab9f093073","Type":"ContainerDied","Data":"240c4cf83267cfb3eaa72efe06237aab1eae345750a49b9537e5424d78fcfbe0"} Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.903749 4876 generic.go:334] "Generic (PLEG): container finished" podID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerID="bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808" exitCode=0 Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.903810 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.903809 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" event={"ID":"23cc6144-fc5d-4e33-8f94-888844fb6283","Type":"ContainerDied","Data":"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808"} Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.903866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-679646c95-f2k85" event={"ID":"23cc6144-fc5d-4e33-8f94-888844fb6283","Type":"ContainerDied","Data":"ca466426ce6265e03ceb0c0aa2a45e7d8bf0b3e25855e1d0da835bd2db6a7b80"} Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.920612 4876 scope.go:117] "RemoveContainer" containerID="545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d" Dec 15 06:56:43 crc kubenswrapper[4876]: E1215 06:56:43.921098 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d\": container with ID starting with 545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d not found: ID does not exist" containerID="545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.921165 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d"} err="failed to get container status \"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d\": rpc error: code = NotFound desc = could not find container \"545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d\": container with ID starting with 545f21e6f2bdc5ee68dc9bed1bb11e421bf41fc6165204f420c16a6fb21d606d not found: ID does not exist" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.921200 4876 scope.go:117] "RemoveContainer" containerID="bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.946624 4876 scope.go:117] "RemoveContainer" containerID="bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808" Dec 15 06:56:43 crc kubenswrapper[4876]: E1215 06:56:43.948460 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808\": container with ID starting with bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808 not found: ID does not exist" containerID="bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.948506 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808"} err="failed to get container status \"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808\": rpc error: code = NotFound desc = could not find container \"bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808\": container with ID starting with bf3bea141275ab34294bd35e0ee498bfd9e4e3adb033c92b3b33d729f79b6808 not found: ID does not exist" Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.961162 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.962836 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz"] Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.968165 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:56:43 crc kubenswrapper[4876]: I1215 06:56:43.971463 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-679646c95-f2k85"] Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.342998 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f"] Dec 15 06:56:44 crc kubenswrapper[4876]: E1215 06:56:44.343252 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343278 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: E1215 06:56:44.343302 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343310 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 15 06:56:44 crc kubenswrapper[4876]: E1215 06:56:44.343324 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343331 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343412 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343421 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343433 4876 
memory_manager.go:354] "RemoveStaleState removing state" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" containerName="controller-manager" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.343828 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.351312 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.353079 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.361538 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.363523 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.366075 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.366159 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.366457 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.366563 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.366706 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.367033 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.367035 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.369081 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.369303 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.372838 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.383014 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f"] Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.390201 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.397840 4876 patch_prober.go:28] interesting 
pod/route-controller-manager-7f8dcb5f49-n9gwz container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.62:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.397930 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7f8dcb5f49-n9gwz" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.62:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.398791 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.439939 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440039 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440090 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-client-ca\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440510 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-proxy-ca-bundles\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440645 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440667 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-config\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") 
" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440722 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vszw\" (UniqueName: \"kubernetes.io/projected/35a0c237-f174-4cd8-89ad-067b5b0b6713-kube-api-access-8vszw\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440784 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a0c237-f174-4cd8-89ad-067b5b0b6713-serving-cert\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.440874 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7w5c\" (UniqueName: \"kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.542772 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-proxy-ca-bundles\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543088 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543126 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-config\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543150 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vszw\" (UniqueName: \"kubernetes.io/projected/35a0c237-f174-4cd8-89ad-067b5b0b6713-kube-api-access-8vszw\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543170 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a0c237-f174-4cd8-89ad-067b5b0b6713-serving-cert\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 
15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543189 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7w5c\" (UniqueName: \"kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543236 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543252 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.543268 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-client-ca\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.544435 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-client-ca\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.545163 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.545176 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.545255 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-config\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.546173 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/35a0c237-f174-4cd8-89ad-067b5b0b6713-proxy-ca-bundles\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.548453 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.548713 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/35a0c237-f174-4cd8-89ad-067b5b0b6713-serving-cert\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.570087 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vszw\" (UniqueName: \"kubernetes.io/projected/35a0c237-f174-4cd8-89ad-067b5b0b6713-kube-api-access-8vszw\") pod \"controller-manager-5886c7ffb6-8gf9f\" (UID: \"35a0c237-f174-4cd8-89ad-067b5b0b6713\") " pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.572159 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7w5c\" (UniqueName: \"kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c\") pod \"route-controller-manager-5f5d69cd77-l487f\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.692622 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.702925 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.714360 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23cc6144-fc5d-4e33-8f94-888844fb6283" path="/var/lib/kubelet/pods/23cc6144-fc5d-4e33-8f94-888844fb6283/volumes" Dec 15 06:56:44 crc kubenswrapper[4876]: I1215 06:56:44.714945 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ce82667-04f0-42b0-b2db-95ab9f093073" path="/var/lib/kubelet/pods/6ce82667-04f0-42b0-b2db-95ab9f093073/volumes" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.246849 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.289156 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f"] Dec 15 06:56:45 crc kubenswrapper[4876]: W1215 06:56:45.294729 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35a0c237_f174_4cd8_89ad_067b5b0b6713.slice/crio-ee5ac6e8266ed4ab616a307f43357fb8521df90f9f70834497b06ddde5443caf WatchSource:0}: Error finding container ee5ac6e8266ed4ab616a307f43357fb8521df90f9f70834497b06ddde5443caf: Status 404 returned error can't find the container with id ee5ac6e8266ed4ab616a307f43357fb8521df90f9f70834497b06ddde5443caf Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.946158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" event={"ID":"707ef72c-ae1f-40f7-97c4-15bc261a2aec","Type":"ContainerStarted","Data":"746616d610cad17b9a781c334af4c7dbfb6cb8c5df5b67d1ca1262b73ed93bbf"} Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.946801 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.946813 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" event={"ID":"707ef72c-ae1f-40f7-97c4-15bc261a2aec","Type":"ContainerStarted","Data":"bb1920c05bb365a0474342f5840124f9daf89225a1bb286443b2f87d4728c9e6"} Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.947539 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" event={"ID":"35a0c237-f174-4cd8-89ad-067b5b0b6713","Type":"ContainerStarted","Data":"acbd823d8e1963a31ecf30cbe86799957b45fa111d53950d1ad71a4683cad2b2"} Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.947583 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" event={"ID":"35a0c237-f174-4cd8-89ad-067b5b0b6713","Type":"ContainerStarted","Data":"ee5ac6e8266ed4ab616a307f43357fb8521df90f9f70834497b06ddde5443caf"} Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.947735 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.951005 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.954069 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.968543 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" podStartSLOduration=2.968524973 podStartE2EDuration="2.968524973s" podCreationTimestamp="2025-12-15 06:56:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:56:45.967919466 +0000 UTC m=+331.539062377" watchObservedRunningTime="2025-12-15 06:56:45.968524973 +0000 UTC m=+331.539667884" Dec 15 06:56:45 crc kubenswrapper[4876]: I1215 06:56:45.985159 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5886c7ffb6-8gf9f" podStartSLOduration=2.9851396 podStartE2EDuration="2.9851396s" podCreationTimestamp="2025-12-15 06:56:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:56:45.982915179 +0000 UTC m=+331.554058100" watchObservedRunningTime="2025-12-15 06:56:45.9851396 +0000 UTC m=+331.556282521" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.085778 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vgtt6"] Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.086436 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.103699 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vgtt6"] Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168561 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-trusted-ca\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168618 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-tls\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168660 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168686 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168707 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-bound-sa-token\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rnh8\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-kube-api-access-5rnh8\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168777 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-certificates\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.168826 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.195596 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270490 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rnh8\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-kube-api-access-5rnh8\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270550 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-certificates\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270602 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-trusted-ca\") pod 
\"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270623 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-tls\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270646 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270670 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.270704 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-bound-sa-token\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.271342 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-ca-trust-extracted\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.272035 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-certificates\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.272268 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-trusted-ca\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.283631 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-registry-tls\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.284166 4876 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-installation-pull-secrets\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.291178 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-bound-sa-token\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.291721 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rnh8\" (UniqueName: \"kubernetes.io/projected/e37070a4-78f2-4ace-8f8a-ecc72edc8b14-kube-api-access-5rnh8\") pod \"image-registry-66df7c8f76-vgtt6\" (UID: \"e37070a4-78f2-4ace-8f8a-ecc72edc8b14\") " pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.407196 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.851157 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-vgtt6"] Dec 15 06:56:46 crc kubenswrapper[4876]: W1215 06:56:46.864384 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode37070a4_78f2_4ace_8f8a_ecc72edc8b14.slice/crio-b147f8665a47131ac2691127b70e9b4390d3049f6734007e51bab6c7b002a5b1 WatchSource:0}: Error finding container b147f8665a47131ac2691127b70e9b4390d3049f6734007e51bab6c7b002a5b1: Status 404 returned error can't find the container with id b147f8665a47131ac2691127b70e9b4390d3049f6734007e51bab6c7b002a5b1 Dec 15 06:56:46 crc kubenswrapper[4876]: I1215 06:56:46.968869 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" event={"ID":"e37070a4-78f2-4ace-8f8a-ecc72edc8b14","Type":"ContainerStarted","Data":"b147f8665a47131ac2691127b70e9b4390d3049f6734007e51bab6c7b002a5b1"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.164619 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.164965 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9n2p9" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="registry-server" containerID="cri-o://b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" gracePeriod=30 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.170910 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.172036 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kf2qs" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="registry-server" containerID="cri-o://0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" gracePeriod=30 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.187001 4876 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.187263 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" containerID="cri-o://8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf" gracePeriod=30 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.193664 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.193926 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2fgng" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="registry-server" containerID="cri-o://c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9" gracePeriod=30 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.209002 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.209388 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4bwjm" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="registry-server" containerID="cri-o://9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87" gracePeriod=30 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.217173 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhjl6"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.218047 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.224067 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhjl6"] Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.290804 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82lg8\" (UniqueName: \"kubernetes.io/projected/d2a29149-46f3-4504-98d2-251c30b194ab-kube-api-access-82lg8\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.291226 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.291278 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.395781 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82lg8\" (UniqueName: \"kubernetes.io/projected/d2a29149-46f3-4504-98d2-251c30b194ab-kube-api-access-82lg8\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.395840 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.395907 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.397295 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.414014 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/d2a29149-46f3-4504-98d2-251c30b194ab-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.428155 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82lg8\" (UniqueName: \"kubernetes.io/projected/d2a29149-46f3-4504-98d2-251c30b194ab-kube-api-access-82lg8\") pod \"marketplace-operator-79b997595-rhjl6\" (UID: \"d2a29149-46f3-4504-98d2-251c30b194ab\") " pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.547594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.615855 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 is running failed: container process not found" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.617712 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 is running failed: container process not found" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.618036 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 is running failed: container process not found" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.618128 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-kf2qs" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="registry-server" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.689080 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.777473 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 is running failed: container process not found" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.778658 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 is running failed: container process not found" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.779093 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 is running failed: container process not found" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 06:56:47 crc kubenswrapper[4876]: E1215 06:56:47.779156 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-9n2p9" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="registry-server" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.799575 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content\") pod \"7dd38562-2323-4992-a015-7ba42406c1b5\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.799717 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p9kz\" (UniqueName: \"kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz\") pod \"7dd38562-2323-4992-a015-7ba42406c1b5\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.799756 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities\") pod \"7dd38562-2323-4992-a015-7ba42406c1b5\" (UID: \"7dd38562-2323-4992-a015-7ba42406c1b5\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.800594 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities" (OuterVolumeSpecName: "utilities") pod "7dd38562-2323-4992-a015-7ba42406c1b5" (UID: "7dd38562-2323-4992-a015-7ba42406c1b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.802010 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.815298 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz" (OuterVolumeSpecName: "kube-api-access-2p9kz") pod "7dd38562-2323-4992-a015-7ba42406c1b5" (UID: "7dd38562-2323-4992-a015-7ba42406c1b5"). InnerVolumeSpecName "kube-api-access-2p9kz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.837216 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.856194 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.870718 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.894026 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dd38562-2323-4992-a015-7ba42406c1b5" (UID: "7dd38562-2323-4992-a015-7ba42406c1b5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901444 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content\") pod \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901523 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb5r7\" (UniqueName: \"kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7\") pod \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901584 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities\") pod \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\" (UID: \"2b53b1ee-b695-4bd7-b591-e5bc2c731614\") " Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901823 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901840 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd38562-2323-4992-a015-7ba42406c1b5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.901855 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p9kz\" (UniqueName: \"kubernetes.io/projected/7dd38562-2323-4992-a015-7ba42406c1b5-kube-api-access-2p9kz\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.902633 4876 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities" (OuterVolumeSpecName: "utilities") pod "2b53b1ee-b695-4bd7-b591-e5bc2c731614" (UID: "2b53b1ee-b695-4bd7-b591-e5bc2c731614"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.904292 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7" (OuterVolumeSpecName: "kube-api-access-vb5r7") pod "2b53b1ee-b695-4bd7-b591-e5bc2c731614" (UID: "2b53b1ee-b695-4bd7-b591-e5bc2c731614"). InnerVolumeSpecName "kube-api-access-vb5r7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.984917 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2fgng" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.985004 4876 generic.go:334] "Generic (PLEG): container finished" podID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerID="c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9" exitCode=0 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.985070 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerDied","Data":"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.985096 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2fgng" event={"ID":"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f","Type":"ContainerDied","Data":"5de2b97a8f5bf47d322d07671df9d5675257dda084611ebe4e6505350b92c08a"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.985136 4876 scope.go:117] "RemoveContainer" containerID="c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.988624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerDied","Data":"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.988737 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4bwjm" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.988369 4876 generic.go:334] "Generic (PLEG): container finished" podID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerID="9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87" exitCode=0 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.989023 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4bwjm" event={"ID":"2b53b1ee-b695-4bd7-b591-e5bc2c731614","Type":"ContainerDied","Data":"30595d45ec5818462cd940617891d5fa86e9422f8536bc0c3f2e2ebebe7bc678"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.991180 4876 generic.go:334] "Generic (PLEG): container finished" podID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerID="8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf" exitCode=0 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.991223 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.991263 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerDied","Data":"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.991326 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvwdp" event={"ID":"47b07d03-5c39-4966-8850-ed81bcfc8e94","Type":"ContainerDied","Data":"868dc7fe2d7b8f1233adb9729e4cad34c3ff05441848e75212df2372377678cd"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.993495 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dd38562-2323-4992-a015-7ba42406c1b5" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" exitCode=0 Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.993539 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerDied","Data":"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.993557 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kf2qs" event={"ID":"7dd38562-2323-4992-a015-7ba42406c1b5","Type":"ContainerDied","Data":"d505159b0db7e1a4f3ab46c559c6cfefed7f6eb19f9749b98b061f099bc83a06"} Dec 15 06:56:47 crc kubenswrapper[4876]: I1215 06:56:47.993605 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kf2qs" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.001639 4876 generic.go:334] "Generic (PLEG): container finished" podID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" exitCode=0 Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.001705 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerDied","Data":"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83"} Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.001730 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9n2p9" event={"ID":"8c9a4d2a-d631-4257-8edd-82ef60db5de1","Type":"ContainerDied","Data":"3b3720638400a533a597891977fc5f6f2c8c2d234fd65a298eeb476043aea48e"} Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.001818 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9n2p9" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003137 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities\") pod \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003307 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content\") pod \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003410 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5jbk\" (UniqueName: \"kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk\") pod \"47b07d03-5c39-4966-8850-ed81bcfc8e94\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003476 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics\") pod \"47b07d03-5c39-4966-8850-ed81bcfc8e94\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003499 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmqss\" (UniqueName: \"kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss\") pod \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003551 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities\") pod \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003590 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca\") pod \"47b07d03-5c39-4966-8850-ed81bcfc8e94\" (UID: \"47b07d03-5c39-4966-8850-ed81bcfc8e94\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003620 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8sln\" (UniqueName: \"kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln\") pod \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\" (UID: \"8c9a4d2a-d631-4257-8edd-82ef60db5de1\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003683 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content\") pod \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\" (UID: \"bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f\") " Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003881 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb5r7\" (UniqueName: \"kubernetes.io/projected/2b53b1ee-b695-4bd7-b591-e5bc2c731614-kube-api-access-vb5r7\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.003893 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.006274 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities" (OuterVolumeSpecName: "utilities") pod "8c9a4d2a-d631-4257-8edd-82ef60db5de1" (UID: "8c9a4d2a-d631-4257-8edd-82ef60db5de1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.006337 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" event={"ID":"e37070a4-78f2-4ace-8f8a-ecc72edc8b14","Type":"ContainerStarted","Data":"ede273910c6c5c22aee297a2a7ef4b54d8130567e52cf0051b695e2857fe3b3a"} Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.006545 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities" (OuterVolumeSpecName: "utilities") pod "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" (UID: "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.006721 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.007113 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "47b07d03-5c39-4966-8850-ed81bcfc8e94" (UID: "47b07d03-5c39-4966-8850-ed81bcfc8e94"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.010242 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln" (OuterVolumeSpecName: "kube-api-access-g8sln") pod "8c9a4d2a-d631-4257-8edd-82ef60db5de1" (UID: "8c9a4d2a-d631-4257-8edd-82ef60db5de1"). InnerVolumeSpecName "kube-api-access-g8sln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.013908 4876 scope.go:117] "RemoveContainer" containerID="70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.019219 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss" (OuterVolumeSpecName: "kube-api-access-vmqss") pod "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" (UID: "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f"). InnerVolumeSpecName "kube-api-access-vmqss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.022652 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "47b07d03-5c39-4966-8850-ed81bcfc8e94" (UID: "47b07d03-5c39-4966-8850-ed81bcfc8e94"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.028916 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" podStartSLOduration=2.028901264 podStartE2EDuration="2.028901264s" podCreationTimestamp="2025-12-15 06:56:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:56:48.025932133 +0000 UTC m=+333.597075054" watchObservedRunningTime="2025-12-15 06:56:48.028901264 +0000 UTC m=+333.600044175" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.029188 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk" (OuterVolumeSpecName: "kube-api-access-q5jbk") pod "47b07d03-5c39-4966-8850-ed81bcfc8e94" (UID: "47b07d03-5c39-4966-8850-ed81bcfc8e94"). InnerVolumeSpecName "kube-api-access-q5jbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.032221 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2b53b1ee-b695-4bd7-b591-e5bc2c731614" (UID: "2b53b1ee-b695-4bd7-b591-e5bc2c731614"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.034564 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" (UID: "bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.041251 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.044582 4876 scope.go:117] "RemoveContainer" containerID="c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.044748 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kf2qs"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.055520 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c9a4d2a-d631-4257-8edd-82ef60db5de1" (UID: "8c9a4d2a-d631-4257-8edd-82ef60db5de1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.058331 4876 scope.go:117] "RemoveContainer" containerID="c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.058596 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9\": container with ID starting with c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9 not found: ID does not exist" containerID="c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.058627 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9"} err="failed to get container status \"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9\": rpc error: code = NotFound desc = could not find container \"c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9\": container with ID starting with c3b0157c5712071a5d0e2977e8dafc9deafb4c946a6426f861300e584d7b28b9 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.058650 4876 scope.go:117] "RemoveContainer" containerID="70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.058937 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a\": container with ID starting with 70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a not found: ID does not exist" containerID="70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.058982 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a"} err="failed to get container status \"70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a\": rpc error: code = NotFound desc = could not find container \"70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a\": container with ID starting with 70356ad334307c99e353d2498481cdb9a8995bb444187e51d0ef9bcef80f493a not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 
06:56:48.059011 4876 scope.go:117] "RemoveContainer" containerID="c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.059614 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde\": container with ID starting with c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde not found: ID does not exist" containerID="c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.059644 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde"} err="failed to get container status \"c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde\": rpc error: code = NotFound desc = could not find container \"c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde\": container with ID starting with c14558998d5ed3b9c55e71b10de1b07de305da7ea1e40a167ca35a8d18fdbcde not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.059663 4876 scope.go:117] "RemoveContainer" containerID="9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.072495 4876 scope.go:117] "RemoveContainer" containerID="8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.088510 4876 scope.go:117] "RemoveContainer" containerID="4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.104947 4876 scope.go:117] "RemoveContainer" containerID="9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.105373 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87\": container with ID starting with 9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87 not found: ID does not exist" containerID="9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105415 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87"} err="failed to get container status \"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87\": rpc error: code = NotFound desc = could not find container \"9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87\": container with ID starting with 9c5645869b03d3208ee7a5f84a3fd0c243cfa481393da84c1f217f5a7a82ad87 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105446 4876 scope.go:117] "RemoveContainer" containerID="8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105768 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmqss\" (UniqueName: \"kubernetes.io/projected/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-kube-api-access-vmqss\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105799 4876 reconciler_common.go:293] "Volume detached for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105814 4876 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105827 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8sln\" (UniqueName: \"kubernetes.io/projected/8c9a4d2a-d631-4257-8edd-82ef60db5de1-kube-api-access-g8sln\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105839 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105852 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105865 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b53b1ee-b695-4bd7-b591-e5bc2c731614-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105876 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c9a4d2a-d631-4257-8edd-82ef60db5de1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105888 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5jbk\" (UniqueName: \"kubernetes.io/projected/47b07d03-5c39-4966-8850-ed81bcfc8e94-kube-api-access-q5jbk\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.105900 4876 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/47b07d03-5c39-4966-8850-ed81bcfc8e94-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.106244 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc\": container with ID starting with 8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc not found: ID does not exist" containerID="8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.106270 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc"} err="failed to get container status \"8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc\": rpc error: code = NotFound desc = could not find container \"8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc\": container with ID starting with 8ec23ffb4c3c379e0e69633a036298d4c275d5b6f71798666cbee13ffa7017fc not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.106284 4876 scope.go:117] "RemoveContainer" 
containerID="4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.106530 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf\": container with ID starting with 4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf not found: ID does not exist" containerID="4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.106566 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf"} err="failed to get container status \"4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf\": rpc error: code = NotFound desc = could not find container \"4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf\": container with ID starting with 4bcb8a97d40ab2dbe45f531b26c40903564fb366f693f697543f2c25250212bf not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.106592 4876 scope.go:117] "RemoveContainer" containerID="8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.110760 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rhjl6"] Dec 15 06:56:48 crc kubenswrapper[4876]: W1215 06:56:48.113537 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2a29149_46f3_4504_98d2_251c30b194ab.slice/crio-84a9c2d340c7c401c5e83f9a9d76eeec748df53f4852c9dd4978d7e99aa9fcf0 WatchSource:0}: Error finding container 84a9c2d340c7c401c5e83f9a9d76eeec748df53f4852c9dd4978d7e99aa9fcf0: Status 404 returned error can't find the container with id 84a9c2d340c7c401c5e83f9a9d76eeec748df53f4852c9dd4978d7e99aa9fcf0 Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.119595 4876 scope.go:117] "RemoveContainer" containerID="868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.138301 4876 scope.go:117] "RemoveContainer" containerID="8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.138661 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf\": container with ID starting with 8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf not found: ID does not exist" containerID="8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.138696 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf"} err="failed to get container status \"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf\": rpc error: code = NotFound desc = could not find container \"8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf\": container with ID starting with 8ed2bc742f20ae043332130142b4165ac1b3044f305b85cb29c8b28d93e07abf not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.138727 4876 scope.go:117] "RemoveContainer" 
containerID="868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.139025 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa\": container with ID starting with 868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa not found: ID does not exist" containerID="868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.139048 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa"} err="failed to get container status \"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa\": rpc error: code = NotFound desc = could not find container \"868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa\": container with ID starting with 868694df4e1a052bc9dcaa75a653d2d84d9f085c0901d3c35afa3e396cbda8aa not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.139066 4876 scope.go:117] "RemoveContainer" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.157578 4876 scope.go:117] "RemoveContainer" containerID="7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.181566 4876 scope.go:117] "RemoveContainer" containerID="55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.210069 4876 scope.go:117] "RemoveContainer" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.212470 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82\": container with ID starting with 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 not found: ID does not exist" containerID="0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.212571 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82"} err="failed to get container status \"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82\": rpc error: code = NotFound desc = could not find container \"0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82\": container with ID starting with 0498b42d9e43362cfea0cf1ef5ef0a4889d1123fe44f6e48c15e2e5af2271b82 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.212600 4876 scope.go:117] "RemoveContainer" containerID="7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.213413 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638\": container with ID starting with 7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638 not found: ID does not exist" containerID="7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638" 
Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.213434 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638"} err="failed to get container status \"7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638\": rpc error: code = NotFound desc = could not find container \"7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638\": container with ID starting with 7d4aeed7fce925482d6dbbb3a071781f92696e9fd38636aa7952e6e4c9617638 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.213453 4876 scope.go:117] "RemoveContainer" containerID="55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.213822 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee\": container with ID starting with 55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee not found: ID does not exist" containerID="55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.213840 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee"} err="failed to get container status \"55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee\": rpc error: code = NotFound desc = could not find container \"55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee\": container with ID starting with 55f145a7a5ab1f757d658f7b75890a93d485b21b801f7b9d9e8ac7ba4ff0beee not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.213854 4876 scope.go:117] "RemoveContainer" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.237895 4876 scope.go:117] "RemoveContainer" containerID="fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.258233 4876 scope.go:117] "RemoveContainer" containerID="e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.279846 4876 scope.go:117] "RemoveContainer" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.280696 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83\": container with ID starting with b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 not found: ID does not exist" containerID="b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.280726 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83"} err="failed to get container status \"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83\": rpc error: code = NotFound desc = could not find container \"b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83\": container with ID starting with 
b1b7ed0e1704939a10789f93e818d810ef55824b40c13a5f32bfed4c4fcd4f83 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.280748 4876 scope.go:117] "RemoveContainer" containerID="fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.280974 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767\": container with ID starting with fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767 not found: ID does not exist" containerID="fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.280995 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767"} err="failed to get container status \"fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767\": rpc error: code = NotFound desc = could not find container \"fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767\": container with ID starting with fa66fe83321b988883ce02d00bf8cd01a8ff65d48fe02b64b39abe2849764767 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.281007 4876 scope.go:117] "RemoveContainer" containerID="e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897" Dec 15 06:56:48 crc kubenswrapper[4876]: E1215 06:56:48.281622 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897\": container with ID starting with e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897 not found: ID does not exist" containerID="e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.281648 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897"} err="failed to get container status \"e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897\": rpc error: code = NotFound desc = could not find container \"e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897\": container with ID starting with e2b982275cc632e8909220cd97d8a687b0d35bdf178b0745544791cdec24a897 not found: ID does not exist" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.316306 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.326930 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2fgng"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.337559 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.340681 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4bwjm"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.350584 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.355980 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-kvwdp"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.386255 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.394831 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9n2p9"] Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.718744 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" path="/var/lib/kubelet/pods/2b53b1ee-b695-4bd7-b591-e5bc2c731614/volumes" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.720714 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" path="/var/lib/kubelet/pods/47b07d03-5c39-4966-8850-ed81bcfc8e94/volumes" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.722018 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" path="/var/lib/kubelet/pods/7dd38562-2323-4992-a015-7ba42406c1b5/volumes" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.723524 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" path="/var/lib/kubelet/pods/8c9a4d2a-d631-4257-8edd-82ef60db5de1/volumes" Dec 15 06:56:48 crc kubenswrapper[4876]: I1215 06:56:48.724390 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" path="/var/lib/kubelet/pods/bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f/volumes" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.016762 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" event={"ID":"d2a29149-46f3-4504-98d2-251c30b194ab","Type":"ContainerStarted","Data":"ac11c288d4930e286663509af6e37aef612120067f8f55cba8b6b966e476ed12"} Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.016972 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" event={"ID":"d2a29149-46f3-4504-98d2-251c30b194ab","Type":"ContainerStarted","Data":"84a9c2d340c7c401c5e83f9a9d76eeec748df53f4852c9dd4978d7e99aa9fcf0"} Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.017170 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.025815 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.035067 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rhjl6" podStartSLOduration=2.035043653 podStartE2EDuration="2.035043653s" podCreationTimestamp="2025-12-15 06:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:56:49.031821183 +0000 UTC m=+334.602964094" watchObservedRunningTime="2025-12-15 06:56:49.035043653 +0000 UTC m=+334.606186564" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.915753 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916403 
4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916420 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916437 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916446 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916460 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916471 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916486 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916497 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916509 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916520 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916535 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916546 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916561 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916572 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916585 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916596 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916613 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916624 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="extract-utilities" Dec 15 06:56:49 crc 
kubenswrapper[4876]: E1215 06:56:49.916637 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916645 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="extract-content" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916683 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916694 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="extract-utilities" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916708 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916716 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916728 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916737 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: E1215 06:56:49.916747 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916755 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916883 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd38562-2323-4992-a015-7ba42406c1b5" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916900 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916912 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b53b1ee-b695-4bd7-b591-e5bc2c731614" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916922 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="47b07d03-5c39-4966-8850-ed81bcfc8e94" containerName="marketplace-operator" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916934 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc79e1eb-6f54-46d6-9e7f-6e5d7021f10f" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.916945 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c9a4d2a-d631-4257-8edd-82ef60db5de1" containerName="registry-server" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.917845 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.921556 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 15 06:56:49 crc kubenswrapper[4876]: I1215 06:56:49.933888 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.032054 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnf4r\" (UniqueName: \"kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.032156 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.032201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.111261 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fwlwf"] Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.112173 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.114767 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.125224 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fwlwf"] Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.134378 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnf4r\" (UniqueName: \"kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.134464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.134506 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.135159 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.137199 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.159053 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnf4r\" (UniqueName: \"kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r\") pod \"certified-operators-lcw6w\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.235906 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-utilities\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.236178 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-catalog-content\") pod \"community-operators-fwlwf\" (UID: 
\"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.236294 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhtl\" (UniqueName: \"kubernetes.io/projected/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-kube-api-access-jrhtl\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.240535 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.337684 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-catalog-content\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.338074 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhtl\" (UniqueName: \"kubernetes.io/projected/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-kube-api-access-jrhtl\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.338170 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-utilities\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.338636 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-utilities\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.338928 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-catalog-content\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.364403 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhtl\" (UniqueName: \"kubernetes.io/projected/b8832c04-2ac1-4a03-8b37-c7f16b0742f4-kube-api-access-jrhtl\") pod \"community-operators-fwlwf\" (UID: \"b8832c04-2ac1-4a03-8b37-c7f16b0742f4\") " pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.423993 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.675766 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 06:56:50 crc kubenswrapper[4876]: W1215 06:56:50.686081 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39dae781_a31a_44d6_aa95_4766741ccf26.slice/crio-86eb57a5f07487de94788ddde15ad2550c6b243845ee636e9764a5dcff04e2af WatchSource:0}: Error finding container 86eb57a5f07487de94788ddde15ad2550c6b243845ee636e9764a5dcff04e2af: Status 404 returned error can't find the container with id 86eb57a5f07487de94788ddde15ad2550c6b243845ee636e9764a5dcff04e2af Dec 15 06:56:50 crc kubenswrapper[4876]: I1215 06:56:50.853764 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fwlwf"] Dec 15 06:56:50 crc kubenswrapper[4876]: W1215 06:56:50.865549 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8832c04_2ac1_4a03_8b37_c7f16b0742f4.slice/crio-907f87a6dd2a8846ae448aabe4d8242a84b56bf899b74f7b0e66466171ff4d74 WatchSource:0}: Error finding container 907f87a6dd2a8846ae448aabe4d8242a84b56bf899b74f7b0e66466171ff4d74: Status 404 returned error can't find the container with id 907f87a6dd2a8846ae448aabe4d8242a84b56bf899b74f7b0e66466171ff4d74 Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.038149 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8832c04-2ac1-4a03-8b37-c7f16b0742f4" containerID="42dc985b7c2d7e22ce5668d532cf051a3d023ed0c1917bf0ea8bccb4ff6412f0" exitCode=0 Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.038254 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwlwf" event={"ID":"b8832c04-2ac1-4a03-8b37-c7f16b0742f4","Type":"ContainerDied","Data":"42dc985b7c2d7e22ce5668d532cf051a3d023ed0c1917bf0ea8bccb4ff6412f0"} Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.038330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwlwf" event={"ID":"b8832c04-2ac1-4a03-8b37-c7f16b0742f4","Type":"ContainerStarted","Data":"907f87a6dd2a8846ae448aabe4d8242a84b56bf899b74f7b0e66466171ff4d74"} Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.039860 4876 generic.go:334] "Generic (PLEG): container finished" podID="39dae781-a31a-44d6-aa95-4766741ccf26" containerID="c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4" exitCode=0 Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.039945 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerDied","Data":"c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4"} Dec 15 06:56:51 crc kubenswrapper[4876]: I1215 06:56:51.039970 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerStarted","Data":"86eb57a5f07487de94788ddde15ad2550c6b243845ee636e9764a5dcff04e2af"} Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.047542 4876 generic.go:334] "Generic (PLEG): container finished" podID="39dae781-a31a-44d6-aa95-4766741ccf26" containerID="0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5" 
exitCode=0 Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.047593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerDied","Data":"0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5"} Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.050246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwlwf" event={"ID":"b8832c04-2ac1-4a03-8b37-c7f16b0742f4","Type":"ContainerStarted","Data":"833c0a23c25626746017b58882bcd9f9c260ec63207b20850b53546e888e3d55"} Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.313675 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5kn4w"] Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.315197 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.317596 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.334890 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5kn4w"] Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.471560 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-utilities\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.471932 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-catalog-content\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.472200 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42klq\" (UniqueName: \"kubernetes.io/projected/0e9f66f7-7322-460d-ba14-60077f9c04a6-kube-api-access-42klq\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.510937 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.512316 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.515090 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.519986 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.573310 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42klq\" (UniqueName: \"kubernetes.io/projected/0e9f66f7-7322-460d-ba14-60077f9c04a6-kube-api-access-42klq\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.573374 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-utilities\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.573411 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-catalog-content\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.573852 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-catalog-content\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.573933 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e9f66f7-7322-460d-ba14-60077f9c04a6-utilities\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.590448 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42klq\" (UniqueName: \"kubernetes.io/projected/0e9f66f7-7322-460d-ba14-60077f9c04a6-kube-api-access-42klq\") pod \"redhat-marketplace-5kn4w\" (UID: \"0e9f66f7-7322-460d-ba14-60077f9c04a6\") " pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.674499 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.674740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6qtj\" (UniqueName: \"kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " 
pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.674824 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.689886 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.775880 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.776034 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6qtj\" (UniqueName: \"kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.776087 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.776872 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.776947 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.798499 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6qtj\" (UniqueName: \"kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj\") pod \"redhat-operators-97lkp\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:52 crc kubenswrapper[4876]: I1215 06:56:52.831368 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.057401 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerStarted","Data":"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4"} Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.060741 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8832c04-2ac1-4a03-8b37-c7f16b0742f4" containerID="833c0a23c25626746017b58882bcd9f9c260ec63207b20850b53546e888e3d55" exitCode=0 Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.060993 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwlwf" event={"ID":"b8832c04-2ac1-4a03-8b37-c7f16b0742f4","Type":"ContainerDied","Data":"833c0a23c25626746017b58882bcd9f9c260ec63207b20850b53546e888e3d55"} Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.077174 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lcw6w" podStartSLOduration=2.442057138 podStartE2EDuration="4.077153637s" podCreationTimestamp="2025-12-15 06:56:49 +0000 UTC" firstStartedPulling="2025-12-15 06:56:51.041780915 +0000 UTC m=+336.612923826" lastFinishedPulling="2025-12-15 06:56:52.676877424 +0000 UTC m=+338.248020325" observedRunningTime="2025-12-15 06:56:53.076475828 +0000 UTC m=+338.647618749" watchObservedRunningTime="2025-12-15 06:56:53.077153637 +0000 UTC m=+338.648296558" Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.140786 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5kn4w"] Dec 15 06:56:53 crc kubenswrapper[4876]: I1215 06:56:53.256515 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.068476 4876 generic.go:334] "Generic (PLEG): container finished" podID="f59147b5-6786-4a73-8d94-eac80330370c" containerID="ecb3226ab81a598d0438c371570083df754a3814f84060567c3dd40c326028ce" exitCode=0 Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.068995 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerDied","Data":"ecb3226ab81a598d0438c371570083df754a3814f84060567c3dd40c326028ce"} Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.070195 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerStarted","Data":"5b86ddb76cf8b2515dbce8d2bf05aa36bc9bd512120c18ae01027c013f961557"} Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.073884 4876 generic.go:334] "Generic (PLEG): container finished" podID="0e9f66f7-7322-460d-ba14-60077f9c04a6" containerID="9810e571a3d414cc44a00b14fd70395581ec776cbbf8065368b8a3dee8e95031" exitCode=0 Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.074005 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5kn4w" event={"ID":"0e9f66f7-7322-460d-ba14-60077f9c04a6","Type":"ContainerDied","Data":"9810e571a3d414cc44a00b14fd70395581ec776cbbf8065368b8a3dee8e95031"} Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.074035 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-5kn4w" event={"ID":"0e9f66f7-7322-460d-ba14-60077f9c04a6","Type":"ContainerStarted","Data":"428b2696eabfd5fa997c510ba0207b4834a14f5db0930d10971ed7f44773641b"} Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.080301 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fwlwf" event={"ID":"b8832c04-2ac1-4a03-8b37-c7f16b0742f4","Type":"ContainerStarted","Data":"56e1a5032196d2bab79a6081be227cb49cd75571138ac7f5ebb55fb0cbbbe9b7"} Dec 15 06:56:54 crc kubenswrapper[4876]: I1215 06:56:54.112696 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fwlwf" podStartSLOduration=1.60993376 podStartE2EDuration="4.112676285s" podCreationTimestamp="2025-12-15 06:56:50 +0000 UTC" firstStartedPulling="2025-12-15 06:56:51.041252041 +0000 UTC m=+336.612394962" lastFinishedPulling="2025-12-15 06:56:53.543994566 +0000 UTC m=+339.115137487" observedRunningTime="2025-12-15 06:56:54.107510762 +0000 UTC m=+339.678653713" watchObservedRunningTime="2025-12-15 06:56:54.112676285 +0000 UTC m=+339.683819196" Dec 15 06:56:55 crc kubenswrapper[4876]: I1215 06:56:55.085638 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5kn4w" event={"ID":"0e9f66f7-7322-460d-ba14-60077f9c04a6","Type":"ContainerStarted","Data":"8ff8946181060e45c179003d837ffff8b859b1ead97675e0c217289e733f8141"} Dec 15 06:56:55 crc kubenswrapper[4876]: I1215 06:56:55.088660 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerStarted","Data":"1253a34d7893ceace55417de6462076887572c0cd7ee62ab9bcbf5a64d50ca4d"} Dec 15 06:56:56 crc kubenswrapper[4876]: I1215 06:56:56.098364 4876 generic.go:334] "Generic (PLEG): container finished" podID="f59147b5-6786-4a73-8d94-eac80330370c" containerID="1253a34d7893ceace55417de6462076887572c0cd7ee62ab9bcbf5a64d50ca4d" exitCode=0 Dec 15 06:56:56 crc kubenswrapper[4876]: I1215 06:56:56.098453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerDied","Data":"1253a34d7893ceace55417de6462076887572c0cd7ee62ab9bcbf5a64d50ca4d"} Dec 15 06:56:56 crc kubenswrapper[4876]: I1215 06:56:56.103036 4876 generic.go:334] "Generic (PLEG): container finished" podID="0e9f66f7-7322-460d-ba14-60077f9c04a6" containerID="8ff8946181060e45c179003d837ffff8b859b1ead97675e0c217289e733f8141" exitCode=0 Dec 15 06:56:56 crc kubenswrapper[4876]: I1215 06:56:56.103071 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5kn4w" event={"ID":"0e9f66f7-7322-460d-ba14-60077f9c04a6","Type":"ContainerDied","Data":"8ff8946181060e45c179003d837ffff8b859b1ead97675e0c217289e733f8141"} Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.110139 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerStarted","Data":"884279527c9e8b114a12802ed2d99e3510320c89eb5db77946c0911f61106f46"} Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.113451 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5kn4w" 
event={"ID":"0e9f66f7-7322-460d-ba14-60077f9c04a6","Type":"ContainerStarted","Data":"fd7073007c42dc577f0d4897d1d9d06092e52640f39695c22f27d44b01153a65"} Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.124670 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-97lkp" podStartSLOduration=2.54896059 podStartE2EDuration="5.12465376s" podCreationTimestamp="2025-12-15 06:56:52 +0000 UTC" firstStartedPulling="2025-12-15 06:56:54.0708166 +0000 UTC m=+339.641959511" lastFinishedPulling="2025-12-15 06:56:56.64650978 +0000 UTC m=+342.217652681" observedRunningTime="2025-12-15 06:56:57.124211898 +0000 UTC m=+342.695354819" watchObservedRunningTime="2025-12-15 06:56:57.12465376 +0000 UTC m=+342.695796671" Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.153441 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5kn4w" podStartSLOduration=2.61740226 podStartE2EDuration="5.153404104s" podCreationTimestamp="2025-12-15 06:56:52 +0000 UTC" firstStartedPulling="2025-12-15 06:56:54.07588249 +0000 UTC m=+339.647025401" lastFinishedPulling="2025-12-15 06:56:56.611884324 +0000 UTC m=+342.183027245" observedRunningTime="2025-12-15 06:56:57.137250238 +0000 UTC m=+342.708393149" watchObservedRunningTime="2025-12-15 06:56:57.153404104 +0000 UTC m=+342.724547095" Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.322871 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:56:57 crc kubenswrapper[4876]: I1215 06:56:57.322950 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.240856 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.241260 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.288857 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.424974 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.425071 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:57:00 crc kubenswrapper[4876]: I1215 06:57:00.467838 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:57:01 crc kubenswrapper[4876]: I1215 06:57:01.070027 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:57:01 crc kubenswrapper[4876]: I1215 06:57:01.070425 
4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" podUID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" containerName="route-controller-manager" containerID="cri-o://746616d610cad17b9a781c334af4c7dbfb6cb8c5df5b67d1ca1262b73ed93bbf" gracePeriod=30 Dec 15 06:57:01 crc kubenswrapper[4876]: I1215 06:57:01.175643 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fwlwf" Dec 15 06:57:01 crc kubenswrapper[4876]: I1215 06:57:01.184612 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.690829 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.690885 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.742376 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.832148 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.832199 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:57:02 crc kubenswrapper[4876]: I1215 06:57:02.868190 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.145333 4876 generic.go:334] "Generic (PLEG): container finished" podID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" containerID="746616d610cad17b9a781c334af4c7dbfb6cb8c5df5b67d1ca1262b73ed93bbf" exitCode=0 Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.145503 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" event={"ID":"707ef72c-ae1f-40f7-97c4-15bc261a2aec","Type":"ContainerDied","Data":"746616d610cad17b9a781c334af4c7dbfb6cb8c5df5b67d1ca1262b73ed93bbf"} Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.185308 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5kn4w" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.192620 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.673569 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.707195 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn"] Dec 15 06:57:03 crc kubenswrapper[4876]: E1215 06:57:03.707523 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" containerName="route-controller-manager" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.707546 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" containerName="route-controller-manager" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.707761 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" containerName="route-controller-manager" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.712475 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.732818 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn"] Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.753854 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7w5c\" (UniqueName: \"kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c\") pod \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.753905 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert\") pod \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.754023 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config\") pod \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.754043 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca\") pod \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\" (UID: \"707ef72c-ae1f-40f7-97c4-15bc261a2aec\") " Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.756056 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config" (OuterVolumeSpecName: "config") pod "707ef72c-ae1f-40f7-97c4-15bc261a2aec" (UID: "707ef72c-ae1f-40f7-97c4-15bc261a2aec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.756169 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca" (OuterVolumeSpecName: "client-ca") pod "707ef72c-ae1f-40f7-97c4-15bc261a2aec" (UID: "707ef72c-ae1f-40f7-97c4-15bc261a2aec"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.761804 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c" (OuterVolumeSpecName: "kube-api-access-q7w5c") pod "707ef72c-ae1f-40f7-97c4-15bc261a2aec" (UID: "707ef72c-ae1f-40f7-97c4-15bc261a2aec"). InnerVolumeSpecName "kube-api-access-q7w5c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.762176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "707ef72c-ae1f-40f7-97c4-15bc261a2aec" (UID: "707ef72c-ae1f-40f7-97c4-15bc261a2aec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.855668 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95gvf\" (UniqueName: \"kubernetes.io/projected/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-kube-api-access-95gvf\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.855746 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-client-ca\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.855792 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-serving-cert\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.855839 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-config\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.856028 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-config\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.856050 4876 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/707ef72c-ae1f-40f7-97c4-15bc261a2aec-client-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.856070 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7w5c\" (UniqueName: \"kubernetes.io/projected/707ef72c-ae1f-40f7-97c4-15bc261a2aec-kube-api-access-q7w5c\") on node \"crc\" DevicePath 
\"\"" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.856089 4876 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/707ef72c-ae1f-40f7-97c4-15bc261a2aec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.957169 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95gvf\" (UniqueName: \"kubernetes.io/projected/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-kube-api-access-95gvf\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.957221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-client-ca\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.957257 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-serving-cert\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.957282 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-config\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.958909 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-config\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.960669 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-client-ca\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.968918 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-serving-cert\") pod \"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:03 crc kubenswrapper[4876]: I1215 06:57:03.975705 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95gvf\" (UniqueName: \"kubernetes.io/projected/784c68f8-cc4d-40d6-8d06-3856e5b8c4b7-kube-api-access-95gvf\") pod 
\"route-controller-manager-79c589ddc4-p5pnn\" (UID: \"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7\") " pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.055036 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.154429 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" event={"ID":"707ef72c-ae1f-40f7-97c4-15bc261a2aec","Type":"ContainerDied","Data":"bb1920c05bb365a0474342f5840124f9daf89225a1bb286443b2f87d4728c9e6"} Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.154961 4876 scope.go:117] "RemoveContainer" containerID="746616d610cad17b9a781c334af4c7dbfb6cb8c5df5b67d1ca1262b73ed93bbf" Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.155149 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f" Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.204607 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.216893 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f5d69cd77-l487f"] Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.477244 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn"] Dec 15 06:57:04 crc kubenswrapper[4876]: W1215 06:57:04.485269 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod784c68f8_cc4d_40d6_8d06_3856e5b8c4b7.slice/crio-96197588fb5f38003f356de624e15a2c99a6ee3cd762c0b686392ce3fd72971e WatchSource:0}: Error finding container 96197588fb5f38003f356de624e15a2c99a6ee3cd762c0b686392ce3fd72971e: Status 404 returned error can't find the container with id 96197588fb5f38003f356de624e15a2c99a6ee3cd762c0b686392ce3fd72971e Dec 15 06:57:04 crc kubenswrapper[4876]: I1215 06:57:04.712979 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707ef72c-ae1f-40f7-97c4-15bc261a2aec" path="/var/lib/kubelet/pods/707ef72c-ae1f-40f7-97c4-15bc261a2aec/volumes" Dec 15 06:57:05 crc kubenswrapper[4876]: I1215 06:57:05.162716 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" event={"ID":"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7","Type":"ContainerStarted","Data":"8dc7eca8f6a207d133b1e7239dfc2e39befc94f47ca2fc275a64e611c56a0981"} Dec 15 06:57:05 crc kubenswrapper[4876]: I1215 06:57:05.162765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" event={"ID":"784c68f8-cc4d-40d6-8d06-3856e5b8c4b7","Type":"ContainerStarted","Data":"96197588fb5f38003f356de624e15a2c99a6ee3cd762c0b686392ce3fd72971e"} Dec 15 06:57:05 crc kubenswrapper[4876]: I1215 06:57:05.164774 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:05 crc kubenswrapper[4876]: I1215 06:57:05.185673 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" podStartSLOduration=4.18564489 podStartE2EDuration="4.18564489s" podCreationTimestamp="2025-12-15 06:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 06:57:05.181604618 +0000 UTC m=+350.752747569" watchObservedRunningTime="2025-12-15 06:57:05.18564489 +0000 UTC m=+350.756787811" Dec 15 06:57:05 crc kubenswrapper[4876]: I1215 06:57:05.231224 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-79c589ddc4-p5pnn" Dec 15 06:57:06 crc kubenswrapper[4876]: I1215 06:57:06.414701 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-vgtt6" Dec 15 06:57:06 crc kubenswrapper[4876]: I1215 06:57:06.478220 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:57:27 crc kubenswrapper[4876]: I1215 06:57:27.322407 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:57:27 crc kubenswrapper[4876]: I1215 06:57:27.323282 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:57:31 crc kubenswrapper[4876]: I1215 06:57:31.529284 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" podUID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" containerName="registry" containerID="cri-o://458ca23ba0d0f2c465dea7abbf94c15e9eeab6fec37c393a115b02e5b0f3f3b5" gracePeriod=30 Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.361141 4876 generic.go:334] "Generic (PLEG): container finished" podID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" containerID="458ca23ba0d0f2c465dea7abbf94c15e9eeab6fec37c393a115b02e5b0f3f3b5" exitCode=0 Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.361210 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" event={"ID":"b3df8165-868f-49d9-a6ab-23aa9ce6e544","Type":"ContainerDied","Data":"458ca23ba0d0f2c465dea7abbf94c15e9eeab6fec37c393a115b02e5b0f3f3b5"} Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.459609 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.582860 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.582999 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.583174 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.583565 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.584293 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.584455 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.584720 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.584812 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.584869 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vpdm\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.585409 4876 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.587049 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.591066 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.593734 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.595171 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm" (OuterVolumeSpecName: "kube-api-access-5vpdm") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "kube-api-access-5vpdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.597625 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: E1215 06:57:32.598908 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:b3df8165-868f-49d9-a6ab-23aa9ce6e544 nodeName:}" failed. No retries permitted until 2025-12-15 06:57:33.098879384 +0000 UTC m=+378.670022295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "registry-storage" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544") : kubernetes.io/csi: Unmounter.TearDownAt failed: rpc error: code = Unknown desc = check target path: could not get consistent content of /proc/mounts after 3 attempts Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.608737 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686795 4876 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686834 4876 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b3df8165-868f-49d9-a6ab-23aa9ce6e544-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686846 4876 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686862 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b3df8165-868f-49d9-a6ab-23aa9ce6e544-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686873 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vpdm\" (UniqueName: \"kubernetes.io/projected/b3df8165-868f-49d9-a6ab-23aa9ce6e544-kube-api-access-5vpdm\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:32 crc kubenswrapper[4876]: I1215 06:57:32.686885 4876 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b3df8165-868f-49d9-a6ab-23aa9ce6e544-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.194698 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\" (UID: \"b3df8165-868f-49d9-a6ab-23aa9ce6e544\") " Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.205709 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "b3df8165-868f-49d9-a6ab-23aa9ce6e544" (UID: "b3df8165-868f-49d9-a6ab-23aa9ce6e544"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.369089 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" event={"ID":"b3df8165-868f-49d9-a6ab-23aa9ce6e544","Type":"ContainerDied","Data":"5209888b39bdd93b01b6dc40a0aa9c67a9e1822c867d59ea4f32b45bacd390f9"} Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.369192 4876 scope.go:117] "RemoveContainer" containerID="458ca23ba0d0f2c465dea7abbf94c15e9eeab6fec37c393a115b02e5b0f3f3b5" Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.369220 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-7k6p8" Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.412321 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:57:33 crc kubenswrapper[4876]: I1215 06:57:33.415096 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-7k6p8"] Dec 15 06:57:34 crc kubenswrapper[4876]: I1215 06:57:34.716665 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" path="/var/lib/kubelet/pods/b3df8165-868f-49d9-a6ab-23aa9ce6e544/volumes" Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.323323 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.324395 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.324522 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.325606 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.325737 4876 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d" gracePeriod=600 Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.558876 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d" exitCode=0 Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.558959 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d"} Dec 15 06:57:57 crc kubenswrapper[4876]: I1215 06:57:57.559403 4876 scope.go:117] "RemoveContainer" containerID="c0855b224f62f0e4eb66f034e5b904a681eaf7bcebeb55daa1254e6768aaf1eb" Dec 15 06:57:58 crc kubenswrapper[4876]: I1215 06:57:58.565980 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01"} Dec 15 06:59:57 crc kubenswrapper[4876]: I1215 06:59:57.323308 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 06:59:57 crc kubenswrapper[4876]: I1215 06:59:57.323919 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.212444 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f"] Dec 15 07:00:00 crc kubenswrapper[4876]: E1215 07:00:00.212675 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" containerName="registry" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.212690 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" containerName="registry" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.212792 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3df8165-868f-49d9-a6ab-23aa9ce6e544" containerName="registry" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.213232 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.215934 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.216588 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.224979 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f"] Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.365784 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6tbx\" (UniqueName: \"kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.365995 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.366079 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.467917 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6tbx\" (UniqueName: \"kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.468006 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.468036 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.470001 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume\") pod 
\"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.485801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.494438 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6tbx\" (UniqueName: \"kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx\") pod \"collect-profiles-29429700-7xb8f\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.537140 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:00 crc kubenswrapper[4876]: I1215 07:00:00.726803 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f"] Dec 15 07:00:01 crc kubenswrapper[4876]: I1215 07:00:01.385947 4876 generic.go:334] "Generic (PLEG): container finished" podID="4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" containerID="c0c7a2bd1592e42e3397d66f19835e9c9f94dbfdd2b1ee6f205615abf667c0bb" exitCode=0 Dec 15 07:00:01 crc kubenswrapper[4876]: I1215 07:00:01.386000 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" event={"ID":"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b","Type":"ContainerDied","Data":"c0c7a2bd1592e42e3397d66f19835e9c9f94dbfdd2b1ee6f205615abf667c0bb"} Dec 15 07:00:01 crc kubenswrapper[4876]: I1215 07:00:01.387189 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" event={"ID":"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b","Type":"ContainerStarted","Data":"0e403f7baeabdef803eafb96599b41a9a32323cea20858f9edf611e504af1775"} Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.648748 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.799565 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume\") pod \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.799703 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6tbx\" (UniqueName: \"kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx\") pod \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.799773 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume\") pod \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\" (UID: \"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b\") " Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.801229 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume" (OuterVolumeSpecName: "config-volume") pod "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" (UID: "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.806745 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx" (OuterVolumeSpecName: "kube-api-access-b6tbx") pod "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" (UID: "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b"). InnerVolumeSpecName "kube-api-access-b6tbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.807288 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" (UID: "4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.901558 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.901622 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:00:02 crc kubenswrapper[4876]: I1215 07:00:02.901635 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6tbx\" (UniqueName: \"kubernetes.io/projected/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b-kube-api-access-b6tbx\") on node \"crc\" DevicePath \"\"" Dec 15 07:00:03 crc kubenswrapper[4876]: I1215 07:00:03.400838 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" event={"ID":"4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b","Type":"ContainerDied","Data":"0e403f7baeabdef803eafb96599b41a9a32323cea20858f9edf611e504af1775"} Dec 15 07:00:03 crc kubenswrapper[4876]: I1215 07:00:03.400875 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e403f7baeabdef803eafb96599b41a9a32323cea20858f9edf611e504af1775" Dec 15 07:00:03 crc kubenswrapper[4876]: I1215 07:00:03.400933 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f" Dec 15 07:00:27 crc kubenswrapper[4876]: I1215 07:00:27.322511 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:00:27 crc kubenswrapper[4876]: I1215 07:00:27.323064 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.323415 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.323963 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.324015 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.324618 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.324666 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01" gracePeriod=600 Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.727429 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01" exitCode=0 Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.727531 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01"} Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.727883 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a"} Dec 15 07:00:57 crc kubenswrapper[4876]: I1215 07:00:57.727910 4876 scope.go:117] "RemoveContainer" containerID="825cf2c8f950e571313940a8e3c45129c0f9cb26b3446ce028d2134153b5b61d" Dec 15 07:02:57 crc kubenswrapper[4876]: I1215 07:02:57.322678 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:02:57 crc kubenswrapper[4876]: I1215 07:02:57.323223 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:03:27 crc kubenswrapper[4876]: I1215 07:03:27.322925 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:03:27 crc kubenswrapper[4876]: I1215 07:03:27.323574 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:03:54 crc kubenswrapper[4876]: I1215 07:03:54.416239 4876 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.323007 4876 patch_prober.go:28] interesting 
pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.323158 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.323227 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.324147 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.324249 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a" gracePeriod=600 Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.861907 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a" exitCode=0 Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.862012 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a"} Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.862358 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df"} Dec 15 07:03:57 crc kubenswrapper[4876]: I1215 07:03:57.862402 4876 scope.go:117] "RemoveContainer" containerID="e6818f68fd203c3818ddb351a043cd465c1ed3e180a5504839ffc9282da9ce01" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.272048 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:05 crc kubenswrapper[4876]: E1215 07:04:05.272705 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" containerName="collect-profiles" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.272716 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" containerName="collect-profiles" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.272803 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" 
containerName="collect-profiles" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.273629 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.283062 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.464142 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.464202 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.464233 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdngb\" (UniqueName: \"kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.565015 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.565367 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.565493 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdngb\" (UniqueName: \"kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.565969 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.565976 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " 
pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.585874 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdngb\" (UniqueName: \"kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb\") pod \"redhat-marketplace-n9rrq\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.593032 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.801225 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:05 crc kubenswrapper[4876]: I1215 07:04:05.926664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerStarted","Data":"f0d451f7d85e3ca0719f6ba3572bf29e69fbefc803d42cf32bec3890f766cf08"} Dec 15 07:04:06 crc kubenswrapper[4876]: I1215 07:04:06.932391 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerID="9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964" exitCode=0 Dec 15 07:04:06 crc kubenswrapper[4876]: I1215 07:04:06.932449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerDied","Data":"9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964"} Dec 15 07:04:06 crc kubenswrapper[4876]: I1215 07:04:06.935631 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:04:08 crc kubenswrapper[4876]: I1215 07:04:08.944637 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerID="376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2" exitCode=0 Dec 15 07:04:08 crc kubenswrapper[4876]: I1215 07:04:08.944720 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerDied","Data":"376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2"} Dec 15 07:04:09 crc kubenswrapper[4876]: I1215 07:04:09.955665 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerStarted","Data":"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654"} Dec 15 07:04:09 crc kubenswrapper[4876]: I1215 07:04:09.981877 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n9rrq" podStartSLOduration=2.136487034 podStartE2EDuration="4.981858927s" podCreationTimestamp="2025-12-15 07:04:05 +0000 UTC" firstStartedPulling="2025-12-15 07:04:06.935417717 +0000 UTC m=+772.506560618" lastFinishedPulling="2025-12-15 07:04:09.7807896 +0000 UTC m=+775.351932511" observedRunningTime="2025-12-15 07:04:09.981183074 +0000 UTC m=+775.552326035" watchObservedRunningTime="2025-12-15 07:04:09.981858927 +0000 UTC m=+775.553001838" Dec 15 07:04:15 crc kubenswrapper[4876]: I1215 07:04:15.593818 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:15 crc kubenswrapper[4876]: I1215 07:04:15.594393 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:15 crc kubenswrapper[4876]: I1215 07:04:15.655477 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:16 crc kubenswrapper[4876]: I1215 07:04:16.035819 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:16 crc kubenswrapper[4876]: I1215 07:04:16.086123 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.007701 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n9rrq" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="registry-server" containerID="cri-o://d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654" gracePeriod=2 Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.340680 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.434065 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities\") pod \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.434199 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdngb\" (UniqueName: \"kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb\") pod \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.434345 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content\") pod \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\" (UID: \"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7\") " Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.435378 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities" (OuterVolumeSpecName: "utilities") pod "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" (UID: "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.439994 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb" (OuterVolumeSpecName: "kube-api-access-fdngb") pod "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" (UID: "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7"). InnerVolumeSpecName "kube-api-access-fdngb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.456327 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" (UID: "dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.535039 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.535077 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:04:18 crc kubenswrapper[4876]: I1215 07:04:18.535088 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdngb\" (UniqueName: \"kubernetes.io/projected/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7-kube-api-access-fdngb\") on node \"crc\" DevicePath \"\"" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.014515 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerID="d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654" exitCode=0 Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.014563 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerDied","Data":"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654"} Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.014590 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9rrq" event={"ID":"dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7","Type":"ContainerDied","Data":"f0d451f7d85e3ca0719f6ba3572bf29e69fbefc803d42cf32bec3890f766cf08"} Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.014600 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9rrq" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.014661 4876 scope.go:117] "RemoveContainer" containerID="d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.033673 4876 scope.go:117] "RemoveContainer" containerID="376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.037627 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.043855 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9rrq"] Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.051249 4876 scope.go:117] "RemoveContainer" containerID="9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.064269 4876 scope.go:117] "RemoveContainer" containerID="d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654" Dec 15 07:04:19 crc kubenswrapper[4876]: E1215 07:04:19.064675 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654\": container with ID starting with d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654 not found: ID does not exist" containerID="d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.064716 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654"} err="failed to get container status \"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654\": rpc error: code = NotFound desc = could not find container \"d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654\": container with ID starting with d16b8286765fea8a6d4a5c6265dd748dea9e5108fe13870c91840a6c348c3654 not found: ID does not exist" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.064745 4876 scope.go:117] "RemoveContainer" containerID="376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2" Dec 15 07:04:19 crc kubenswrapper[4876]: E1215 07:04:19.065240 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2\": container with ID starting with 376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2 not found: ID does not exist" containerID="376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.065337 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2"} err="failed to get container status \"376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2\": rpc error: code = NotFound desc = could not find container \"376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2\": container with ID starting with 376001f13ae4058bc42f8bf1a0079ac7d1bc8fad4558423b2755789f14e38fa2 not found: ID does not exist" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.065372 4876 scope.go:117] "RemoveContainer" 
containerID="9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964" Dec 15 07:04:19 crc kubenswrapper[4876]: E1215 07:04:19.065709 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964\": container with ID starting with 9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964 not found: ID does not exist" containerID="9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964" Dec 15 07:04:19 crc kubenswrapper[4876]: I1215 07:04:19.065743 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964"} err="failed to get container status \"9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964\": rpc error: code = NotFound desc = could not find container \"9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964\": container with ID starting with 9a6073b99feebb9f7041cbedbdba61597e9f9356fefadb6375bf056ed75d0964 not found: ID does not exist" Dec 15 07:04:20 crc kubenswrapper[4876]: I1215 07:04:20.712255 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" path="/var/lib/kubelet/pods/dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7/volumes" Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.727176 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wm92c"] Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728364 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-controller" containerID="cri-o://fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728523 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-acl-logging" containerID="cri-o://da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728505 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="nbdb" containerID="cri-o://811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728590 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="sbdb" containerID="cri-o://5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728617 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="northd" containerID="cri-o://9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728680 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" 
podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.728902 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-node" containerID="cri-o://5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8" gracePeriod=30 Dec 15 07:05:01 crc kubenswrapper[4876]: I1215 07:05:01.765284 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" containerID="cri-o://d114696e400065dcb1a37c4ed1dd3612ec78a17c0ddc386bd9405341a22ba343" gracePeriod=30 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.260375 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/2.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.261386 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/1.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.261482 4876 generic.go:334] "Generic (PLEG): container finished" podID="d2c0440d-a8eb-4f51-8626-c3bb9d1b0867" containerID="1cb7b04b03ffa94f95360a75b866a80620d08fc9d3cff5391fa24f7a0627d3be" exitCode=2 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.261561 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerDied","Data":"1cb7b04b03ffa94f95360a75b866a80620d08fc9d3cff5391fa24f7a0627d3be"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.261612 4876 scope.go:117] "RemoveContainer" containerID="112f3a84a1c36b9409cffacdd579bb010218c94f8d0539136173120d25c87008" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.262173 4876 scope.go:117] "RemoveContainer" containerID="1cb7b04b03ffa94f95360a75b866a80620d08fc9d3cff5391fa24f7a0627d3be" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.269182 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovnkube-controller/3.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.272379 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-acl-logging/0.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.273983 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-controller/0.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274629 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="d114696e400065dcb1a37c4ed1dd3612ec78a17c0ddc386bd9405341a22ba343" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274671 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274684 4876 
generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274695 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274706 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274715 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8" exitCode=0 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274726 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53" exitCode=143 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274738 4876 generic.go:334] "Generic (PLEG): container finished" podID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerID="fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766" exitCode=143 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274775 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"d114696e400065dcb1a37c4ed1dd3612ec78a17c0ddc386bd9405341a22ba343"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274825 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274854 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274869 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274882 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274894 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" 
event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.274906 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766"} Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.308085 4876 scope.go:117] "RemoveContainer" containerID="562aeda81ea765ab9f3266387e0315fc1a18c5d08f1a3e232e1df5c8ffe20e93" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.478991 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-acl-logging/0.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.480009 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-controller/0.log" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.480723 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.539286 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-7z6hx"] Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540011 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kubecfg-setup" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540137 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kubecfg-setup" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540221 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540300 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540371 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-node" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540440 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-node" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540508 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540572 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540657 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540897 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540916 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="extract-content" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540924 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="extract-content" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540937 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="extract-utilities" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540945 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="extract-utilities" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540956 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-ovn-metrics" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540964 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-ovn-metrics" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540978 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="registry-server" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.540986 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="registry-server" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.540994 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-acl-logging" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541004 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-acl-logging" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.541012 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541018 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.541026 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="sbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541033 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="sbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.541041 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="northd" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541048 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="northd" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.541055 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="nbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541061 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="nbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.541069 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.541074 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542308 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-acl-logging" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542375 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="northd" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542425 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovn-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542483 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-node" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542544 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542629 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542683 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542751 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="kube-rbac-proxy-ovn-metrics" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542830 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd42c6ec-e59b-43d6-9589-fa9e5a6b10d7" containerName="registry-server" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.542964 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="sbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.543289 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="nbdb" Dec 15 07:05:02 crc kubenswrapper[4876]: E1215 07:05:02.543463 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.543658 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.543852 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.543912 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" containerName="ovnkube-controller" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.547161 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.603843 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.603925 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzj5w\" (UniqueName: \"kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.603971 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604002 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.603986 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash" (OuterVolumeSpecName: "host-slash") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604021 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604101 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604134 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604187 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604222 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604262 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604265 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604356 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604392 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604467 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604504 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604535 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604569 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604595 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604637 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604674 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604796 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket\") pod \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\" (UID: \"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c\") " Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604881 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.604938 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605004 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605014 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605044 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605062 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log" (OuterVolumeSpecName: "node-log") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605129 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605167 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605207 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605248 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605322 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket" (OuterVolumeSpecName: "log-socket") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605563 4876 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-log-socket\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605590 4876 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-slash\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605602 4876 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605617 4876 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605630 4876 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605642 4876 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605655 4876 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605664 4876 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-node-log\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605674 4876 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605686 4876 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605698 4876 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605709 4876 reconciler_common.go:293] 
"Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605724 4876 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605734 4876 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605790 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605832 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.605996 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.611644 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w" (OuterVolumeSpecName: "kube-api-access-zzj5w") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "kube-api-access-zzj5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.612276 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.625198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" (UID: "2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709368 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-node-log\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709430 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-env-overrides\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709467 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709515 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-log-socket\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709552 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ls8xn\" (UniqueName: \"kubernetes.io/projected/03f52890-3e20-4bc0-a443-6070253fa18e-kube-api-access-ls8xn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709580 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-netns\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709604 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-systemd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709634 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-systemd-units\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709688 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-kubelet\") 
pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709710 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709733 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-netd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709759 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-bin\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709780 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-var-lib-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03f52890-3e20-4bc0-a443-6070253fa18e-ovn-node-metrics-cert\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709836 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-script-lib\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709905 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-ovn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709939 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-etc-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709970 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-slash\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.709989 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-config\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710046 4876 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710061 4876 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710075 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzj5w\" (UniqueName: \"kubernetes.io/projected/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-kube-api-access-zzj5w\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710087 4876 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710121 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.710135 4876 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811721 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-var-lib-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811783 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03f52890-3e20-4bc0-a443-6070253fa18e-ovn-node-metrics-cert\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc 
kubenswrapper[4876]: I1215 07:05:02.811802 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811826 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-script-lib\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811842 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-ovn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811862 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-etc-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811881 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-slash\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811900 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-config\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811920 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-node-log\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811938 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-env-overrides\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811957 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811980 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-log-socket\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811974 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.811999 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ls8xn\" (UniqueName: \"kubernetes.io/projected/03f52890-3e20-4bc0-a443-6070253fa18e-kube-api-access-ls8xn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812015 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-netns\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812032 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-systemd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812052 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-var-lib-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812058 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-systemd-units\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812127 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-systemd-units\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812126 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812170 4876 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-netd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812186 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-kubelet\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-bin\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812248 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-bin\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812789 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-ovn-kubernetes\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812893 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-ovn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812926 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-etc-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812959 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-script-lib\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812993 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-slash\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813020 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-log-socket\") pod \"ovnkube-node-7z6hx\" (UID: 
\"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.812151 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-openvswitch\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813198 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-run-netns\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813211 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-cni-netd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813228 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-run-systemd\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813251 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-host-kubelet\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813362 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/03f52890-3e20-4bc0-a443-6070253fa18e-node-log\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.813677 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-env-overrides\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.814074 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/03f52890-3e20-4bc0-a443-6070253fa18e-ovnkube-config\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.817062 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/03f52890-3e20-4bc0-a443-6070253fa18e-ovn-node-metrics-cert\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.832986 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ls8xn\" (UniqueName: \"kubernetes.io/projected/03f52890-3e20-4bc0-a443-6070253fa18e-kube-api-access-ls8xn\") pod \"ovnkube-node-7z6hx\" (UID: \"03f52890-3e20-4bc0-a443-6070253fa18e\") " pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.863961 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:02 crc kubenswrapper[4876]: W1215 07:05:02.888569 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03f52890_3e20_4bc0_a443_6070253fa18e.slice/crio-d377173b768193adf46e151d1c97560e07c1dbe348ceb7dad281e8ec39923103 WatchSource:0}: Error finding container d377173b768193adf46e151d1c97560e07c1dbe348ceb7dad281e8ec39923103: Status 404 returned error can't find the container with id d377173b768193adf46e151d1c97560e07c1dbe348ceb7dad281e8ec39923103 Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.978373 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:02 crc kubenswrapper[4876]: I1215 07:05:02.980997 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.015273 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.015862 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.015925 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gr6g\" (UniqueName: \"kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.117846 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.117906 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.117953 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9gr6g\" (UniqueName: \"kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.118510 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.118644 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.138300 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gr6g\" (UniqueName: \"kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g\") pod \"redhat-operators-4wq9n\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.282234 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-ddcwq_d2c0440d-a8eb-4f51-8626-c3bb9d1b0867/kube-multus/2.log" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.282550 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-ddcwq" event={"ID":"d2c0440d-a8eb-4f51-8626-c3bb9d1b0867","Type":"ContainerStarted","Data":"c1a97021d1421b544ba3aa909c46c83e3e39642a687d777e53af94732a13e570"} Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.286534 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-acl-logging/0.log" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.287098 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-wm92c_2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/ovn-controller/0.log" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.287660 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.287647 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-wm92c" event={"ID":"2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c","Type":"ContainerDied","Data":"b4811f813be370967183741aaa81c940db056de7fd4fff263e4658a182cf728d"} Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.287883 4876 scope.go:117] "RemoveContainer" containerID="d114696e400065dcb1a37c4ed1dd3612ec78a17c0ddc386bd9405341a22ba343" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.288539 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"d377173b768193adf46e151d1c97560e07c1dbe348ceb7dad281e8ec39923103"} Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.304160 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.308252 4876 scope.go:117] "RemoveContainer" containerID="5a6998041a6e97267646fdd9a13badd63f6f27259d969aad8b6104646edca340" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.327935 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wm92c"] Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.336391 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-wm92c"] Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.343505 4876 scope.go:117] "RemoveContainer" containerID="811491ab5e69d6e086067f4732c464a065f92d8cfb99793beff6f3a343631e7d" Dec 15 07:05:03 crc kubenswrapper[4876]: E1215 07:05:03.360625 4876 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(c0f4f8735b469245b1e12b91f516934d23863a72436947315b832836cef64a7c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 07:05:03 crc kubenswrapper[4876]: E1215 07:05:03.360705 4876 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(c0f4f8735b469245b1e12b91f516934d23863a72436947315b832836cef64a7c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: E1215 07:05:03.360733 4876 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(c0f4f8735b469245b1e12b91f516934d23863a72436947315b832836cef64a7c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:03 crc kubenswrapper[4876]: E1215 07:05:03.360791 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-4wq9n_openshift-marketplace(2511bb97-1863-4f74-8a1a-b6957a95731b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-4wq9n_openshift-marketplace(2511bb97-1863-4f74-8a1a-b6957a95731b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(c0f4f8735b469245b1e12b91f516934d23863a72436947315b832836cef64a7c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-4wq9n" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.386441 4876 scope.go:117] "RemoveContainer" containerID="9aa24f88b7fa7f391863f46fa4b1dbdca7dbd62a82f0d48af719ab1bde10e70b" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.402990 4876 scope.go:117] "RemoveContainer" containerID="706c5113c7e6657cd4a59b167b78d6028a2cdbdd61f5ee2578750a41108b9a8a" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.426008 4876 scope.go:117] "RemoveContainer" containerID="5510a676c8fd61d4b3735ea35090b0da95b16649ebe4232177a02fbd5e0ca5d8" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.441387 4876 scope.go:117] "RemoveContainer" containerID="da15c8c6c59fffbf5c620951425c730b6a17f8f79f5cbfc54fb3dc16b6f42e53" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.455026 4876 scope.go:117] "RemoveContainer" containerID="fa9bc905782e7d36747c925127cf56ff17eec82bb1fef1a335d7e35267599766" Dec 15 07:05:03 crc kubenswrapper[4876]: I1215 07:05:03.476470 4876 scope.go:117] "RemoveContainer" containerID="4b19f147f6d2044968f16109a851656db1fc3c0c9a92fa3f2dd6830a4c5c18de" Dec 15 07:05:04 crc kubenswrapper[4876]: I1215 07:05:04.295498 4876 generic.go:334] "Generic (PLEG): container finished" podID="03f52890-3e20-4bc0-a443-6070253fa18e" containerID="2aec55f69624e5ed1d97f114e1bcdab253cba3b5e8bbd489e8488476d48d3393" exitCode=0 Dec 15 07:05:04 crc kubenswrapper[4876]: I1215 07:05:04.295566 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerDied","Data":"2aec55f69624e5ed1d97f114e1bcdab253cba3b5e8bbd489e8488476d48d3393"} Dec 15 07:05:04 crc kubenswrapper[4876]: I1215 07:05:04.714435 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c" path="/var/lib/kubelet/pods/2bf97f83-6aa5-4a4d-9d1b-e84244c6d44c/volumes" Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.306769 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"8dbd326e5e98dc01de16002a2b34b26a633a600ca06733afa2f64184b3c0047a"} Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.308054 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"3ce2a6cb3fb5a7180bd3b3ab8d053b025d430283b54cd7a55a880a2daf2e3109"} Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.308076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"230a487a84a0965a41ad13a11e943bc5c5fafe5df3a8ca33544bb957e6f8509a"} Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.308091 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"0ee39f589fa984429f82d02698d9ece7f730b75150e27770889cd2a4928246e8"} Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.308124 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" 
event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"be4a605d997811b5c83c54e4ee0db829d2978b71e3655e93ed69ca06744f09af"} Dec 15 07:05:05 crc kubenswrapper[4876]: I1215 07:05:05.308138 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"40f143c7f0a9c3836965a72fe9a94c0025f82e793d861a0cdccb61840261fed7"} Dec 15 07:05:07 crc kubenswrapper[4876]: I1215 07:05:07.319347 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"b05c588496f749271880f9e19f087a53bdac0b1403fa2ae46b1309f6424da4b5"} Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.281131 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-cccw6"] Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.282224 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.284136 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.284147 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.284644 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.284840 4876 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-gjdrn" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.311387 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.311464 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.311571 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spff4\" (UniqueName: \"kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.341530 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" event={"ID":"03f52890-3e20-4bc0-a443-6070253fa18e","Type":"ContainerStarted","Data":"2d96bc21e26dafb1533dfc890788fb7293f048c86008d0dc0540830efd43771b"} Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.341937 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.368468 
4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.370688 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" podStartSLOduration=8.370666974 podStartE2EDuration="8.370666974s" podCreationTimestamp="2025-12-15 07:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:05:10.368680917 +0000 UTC m=+835.939823858" watchObservedRunningTime="2025-12-15 07:05:10.370666974 +0000 UTC m=+835.941809885" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.412091 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.412194 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.412262 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spff4\" (UniqueName: \"kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.412404 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.413011 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.436047 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spff4\" (UniqueName: \"kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4\") pod \"crc-storage-crc-cccw6\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.596205 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.623451 4876 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(0d6b38d6df3b3a372f9bc62a8278f1778c96633a47c73b5f0486eb4f76a2e3f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.623580 4876 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(0d6b38d6df3b3a372f9bc62a8278f1778c96633a47c73b5f0486eb4f76a2e3f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.623604 4876 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(0d6b38d6df3b3a372f9bc62a8278f1778c96633a47c73b5f0486eb4f76a2e3f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.623648 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-cccw6_crc-storage(1ebf5f89-26e8-4ee0-9de4-2c218574e6df)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-cccw6_crc-storage(1ebf5f89-26e8-4ee0-9de4-2c218574e6df)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(0d6b38d6df3b3a372f9bc62a8278f1778c96633a47c73b5f0486eb4f76a2e3f2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-cccw6" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.800409 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cccw6"] Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.809843 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.809977 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:10 crc kubenswrapper[4876]: I1215 07:05:10.810384 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.864286 4876 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(5401939d0fed9f05700e1593c15d65c1aab6f66afe60c1be7c809358112abbef): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.864386 4876 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(5401939d0fed9f05700e1593c15d65c1aab6f66afe60c1be7c809358112abbef): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.864410 4876 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(5401939d0fed9f05700e1593c15d65c1aab6f66afe60c1be7c809358112abbef): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:10 crc kubenswrapper[4876]: E1215 07:05:10.864464 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"redhat-operators-4wq9n_openshift-marketplace(2511bb97-1863-4f74-8a1a-b6957a95731b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"redhat-operators-4wq9n_openshift-marketplace(2511bb97-1863-4f74-8a1a-b6957a95731b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_redhat-operators-4wq9n_openshift-marketplace_2511bb97-1863-4f74-8a1a-b6957a95731b_0(5401939d0fed9f05700e1593c15d65c1aab6f66afe60c1be7c809358112abbef): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/redhat-operators-4wq9n" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" Dec 15 07:05:11 crc kubenswrapper[4876]: I1215 07:05:11.347600 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:11 crc kubenswrapper[4876]: I1215 07:05:11.348521 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:11 crc kubenswrapper[4876]: I1215 07:05:11.348589 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:11 crc kubenswrapper[4876]: I1215 07:05:11.349043 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:11 crc kubenswrapper[4876]: E1215 07:05:11.374481 4876 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(d1d4733e9fd9977561a315d30a150b07d49919f253a90640eef2c24dd0aae438): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 15 07:05:11 crc kubenswrapper[4876]: E1215 07:05:11.374561 4876 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(d1d4733e9fd9977561a315d30a150b07d49919f253a90640eef2c24dd0aae438): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:11 crc kubenswrapper[4876]: E1215 07:05:11.374585 4876 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(d1d4733e9fd9977561a315d30a150b07d49919f253a90640eef2c24dd0aae438): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:11 crc kubenswrapper[4876]: E1215 07:05:11.374624 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-cccw6_crc-storage(1ebf5f89-26e8-4ee0-9de4-2c218574e6df)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-cccw6_crc-storage(1ebf5f89-26e8-4ee0-9de4-2c218574e6df)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-cccw6_crc-storage_1ebf5f89-26e8-4ee0-9de4-2c218574e6df_0(d1d4733e9fd9977561a315d30a150b07d49919f253a90640eef2c24dd0aae438): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-cccw6" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" Dec 15 07:05:11 crc kubenswrapper[4876]: I1215 07:05:11.378760 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:25 crc kubenswrapper[4876]: I1215 07:05:25.704553 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:25 crc kubenswrapper[4876]: I1215 07:05:25.704648 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:25 crc kubenswrapper[4876]: I1215 07:05:25.705777 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:25 crc kubenswrapper[4876]: I1215 07:05:25.705956 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.164628 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.174882 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cccw6"] Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.430664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cccw6" event={"ID":"1ebf5f89-26e8-4ee0-9de4-2c218574e6df","Type":"ContainerStarted","Data":"582625cbfa624d8f8ff1e114fa86a0a234576713081d3c65ac08651dfbc381d9"} Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.432610 4876 generic.go:334] "Generic (PLEG): container finished" podID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerID="127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08" exitCode=0 Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.432641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerDied","Data":"127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08"} Dec 15 07:05:26 crc kubenswrapper[4876]: I1215 07:05:26.432656 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerStarted","Data":"083366d946876e523802c731f79520600b939e503272c48991504b148f5b649d"} Dec 15 07:05:28 crc kubenswrapper[4876]: I1215 07:05:28.441691 4876 generic.go:334] "Generic (PLEG): container finished" podID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" containerID="1fc9f577b5556289762f33baba66a8d91aac233d49845b27d2f08f03dcfa104c" exitCode=0 Dec 15 07:05:28 crc 
kubenswrapper[4876]: I1215 07:05:28.441759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cccw6" event={"ID":"1ebf5f89-26e8-4ee0-9de4-2c218574e6df","Type":"ContainerDied","Data":"1fc9f577b5556289762f33baba66a8d91aac233d49845b27d2f08f03dcfa104c"} Dec 15 07:05:28 crc kubenswrapper[4876]: I1215 07:05:28.444508 4876 generic.go:334] "Generic (PLEG): container finished" podID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerID="4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789" exitCode=0 Dec 15 07:05:28 crc kubenswrapper[4876]: I1215 07:05:28.444546 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerDied","Data":"4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789"} Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.451968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerStarted","Data":"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548"} Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.474591 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4wq9n" podStartSLOduration=24.736227951 podStartE2EDuration="27.474574591s" podCreationTimestamp="2025-12-15 07:05:02 +0000 UTC" firstStartedPulling="2025-12-15 07:05:26.433955636 +0000 UTC m=+852.005098547" lastFinishedPulling="2025-12-15 07:05:29.172302236 +0000 UTC m=+854.743445187" observedRunningTime="2025-12-15 07:05:29.472489101 +0000 UTC m=+855.043632032" watchObservedRunningTime="2025-12-15 07:05:29.474574591 +0000 UTC m=+855.045717512" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.667598 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.858009 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spff4\" (UniqueName: \"kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4\") pod \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.858085 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt\") pod \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.858138 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage\") pod \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\" (UID: \"1ebf5f89-26e8-4ee0-9de4-2c218574e6df\") " Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.858408 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1ebf5f89-26e8-4ee0-9de4-2c218574e6df" (UID: "1ebf5f89-26e8-4ee0-9de4-2c218574e6df"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.864916 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4" (OuterVolumeSpecName: "kube-api-access-spff4") pod "1ebf5f89-26e8-4ee0-9de4-2c218574e6df" (UID: "1ebf5f89-26e8-4ee0-9de4-2c218574e6df"). InnerVolumeSpecName "kube-api-access-spff4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.871710 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1ebf5f89-26e8-4ee0-9de4-2c218574e6df" (UID: "1ebf5f89-26e8-4ee0-9de4-2c218574e6df"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.961604 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spff4\" (UniqueName: \"kubernetes.io/projected/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-kube-api-access-spff4\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.961665 4876 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:29 crc kubenswrapper[4876]: I1215 07:05:29.961694 4876 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1ebf5f89-26e8-4ee0-9de4-2c218574e6df-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:30 crc kubenswrapper[4876]: I1215 07:05:30.465449 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-cccw6" Dec 15 07:05:30 crc kubenswrapper[4876]: I1215 07:05:30.465448 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cccw6" event={"ID":"1ebf5f89-26e8-4ee0-9de4-2c218574e6df","Type":"ContainerDied","Data":"582625cbfa624d8f8ff1e114fa86a0a234576713081d3c65ac08651dfbc381d9"} Dec 15 07:05:30 crc kubenswrapper[4876]: I1215 07:05:30.465675 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="582625cbfa624d8f8ff1e114fa86a0a234576713081d3c65ac08651dfbc381d9" Dec 15 07:05:32 crc kubenswrapper[4876]: I1215 07:05:32.887390 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-7z6hx" Dec 15 07:05:33 crc kubenswrapper[4876]: I1215 07:05:33.305887 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:33 crc kubenswrapper[4876]: I1215 07:05:33.305949 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:34 crc kubenswrapper[4876]: I1215 07:05:34.344655 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4wq9n" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="registry-server" probeResult="failure" output=< Dec 15 07:05:34 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 07:05:34 crc kubenswrapper[4876]: > Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.445205 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9"] Dec 15 07:05:36 crc kubenswrapper[4876]: E1215 07:05:36.445663 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" containerName="storage" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.445673 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" containerName="storage" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.445784 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" containerName="storage" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.446625 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.453488 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.459436 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9"] Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.540509 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.540809 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.540841 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7qsn\" (UniqueName: \"kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.641541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.641610 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.641653 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7qsn\" (UniqueName: \"kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.642526 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.642561 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.663235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7qsn\" (UniqueName: \"kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:36 crc kubenswrapper[4876]: I1215 07:05:36.798632 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:37 crc kubenswrapper[4876]: I1215 07:05:37.043604 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9"] Dec 15 07:05:37 crc kubenswrapper[4876]: I1215 07:05:37.502847 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerStarted","Data":"d0f7be84c5f59cdfeceb361f76570934c0bcb2d0ab922feef19a230d81906c00"} Dec 15 07:05:37 crc kubenswrapper[4876]: I1215 07:05:37.502896 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerStarted","Data":"8c9049e9f189db5a724a624ffbc69c89b947ba86ac5e6145bbcdd02a93a96135"} Dec 15 07:05:38 crc kubenswrapper[4876]: I1215 07:05:38.509863 4876 generic.go:334] "Generic (PLEG): container finished" podID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerID="d0f7be84c5f59cdfeceb361f76570934c0bcb2d0ab922feef19a230d81906c00" exitCode=0 Dec 15 07:05:38 crc kubenswrapper[4876]: I1215 07:05:38.509926 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerDied","Data":"d0f7be84c5f59cdfeceb361f76570934c0bcb2d0ab922feef19a230d81906c00"} Dec 15 07:05:41 crc kubenswrapper[4876]: I1215 07:05:41.528207 4876 generic.go:334] "Generic (PLEG): container finished" podID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerID="370162c0fdb0484030d750a1a89bbe2373f7fa5850875731c4cdd3d7d27d8667" exitCode=0 Dec 15 07:05:41 crc kubenswrapper[4876]: I1215 07:05:41.528267 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" 
event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerDied","Data":"370162c0fdb0484030d750a1a89bbe2373f7fa5850875731c4cdd3d7d27d8667"} Dec 15 07:05:42 crc kubenswrapper[4876]: I1215 07:05:42.538034 4876 generic.go:334] "Generic (PLEG): container finished" podID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerID="311297ab33c2d4bffed65f6c0f8be9314bb69e96e68ad0883155e091efc30ca7" exitCode=0 Dec 15 07:05:42 crc kubenswrapper[4876]: I1215 07:05:42.538192 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerDied","Data":"311297ab33c2d4bffed65f6c0f8be9314bb69e96e68ad0883155e091efc30ca7"} Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.345751 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.387078 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.679053 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.793273 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.953502 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle\") pod \"870ffce7-be32-45bc-811e-39cf12bdef1f\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.953612 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7qsn\" (UniqueName: \"kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn\") pod \"870ffce7-be32-45bc-811e-39cf12bdef1f\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.953645 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util\") pod \"870ffce7-be32-45bc-811e-39cf12bdef1f\" (UID: \"870ffce7-be32-45bc-811e-39cf12bdef1f\") " Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.954887 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle" (OuterVolumeSpecName: "bundle") pod "870ffce7-be32-45bc-811e-39cf12bdef1f" (UID: "870ffce7-be32-45bc-811e-39cf12bdef1f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.960389 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn" (OuterVolumeSpecName: "kube-api-access-g7qsn") pod "870ffce7-be32-45bc-811e-39cf12bdef1f" (UID: "870ffce7-be32-45bc-811e-39cf12bdef1f"). InnerVolumeSpecName "kube-api-access-g7qsn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:05:43 crc kubenswrapper[4876]: I1215 07:05:43.965137 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util" (OuterVolumeSpecName: "util") pod "870ffce7-be32-45bc-811e-39cf12bdef1f" (UID: "870ffce7-be32-45bc-811e-39cf12bdef1f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.055289 4876 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.055368 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7qsn\" (UniqueName: \"kubernetes.io/projected/870ffce7-be32-45bc-811e-39cf12bdef1f-kube-api-access-g7qsn\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.055397 4876 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/870ffce7-be32-45bc-811e-39cf12bdef1f-util\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.551418 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" event={"ID":"870ffce7-be32-45bc-811e-39cf12bdef1f","Type":"ContainerDied","Data":"8c9049e9f189db5a724a624ffbc69c89b947ba86ac5e6145bbcdd02a93a96135"} Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.551461 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.551492 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c9049e9f189db5a724a624ffbc69c89b947ba86ac5e6145bbcdd02a93a96135" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.551587 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4wq9n" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="registry-server" containerID="cri-o://6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548" gracePeriod=2 Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.906456 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.973215 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities\") pod \"2511bb97-1863-4f74-8a1a-b6957a95731b\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.973258 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content\") pod \"2511bb97-1863-4f74-8a1a-b6957a95731b\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.973326 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gr6g\" (UniqueName: \"kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g\") pod \"2511bb97-1863-4f74-8a1a-b6957a95731b\" (UID: \"2511bb97-1863-4f74-8a1a-b6957a95731b\") " Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.975656 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities" (OuterVolumeSpecName: "utilities") pod "2511bb97-1863-4f74-8a1a-b6957a95731b" (UID: "2511bb97-1863-4f74-8a1a-b6957a95731b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:05:44 crc kubenswrapper[4876]: I1215 07:05:44.978621 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g" (OuterVolumeSpecName: "kube-api-access-9gr6g") pod "2511bb97-1863-4f74-8a1a-b6957a95731b" (UID: "2511bb97-1863-4f74-8a1a-b6957a95731b"). InnerVolumeSpecName "kube-api-access-9gr6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.073848 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gr6g\" (UniqueName: \"kubernetes.io/projected/2511bb97-1863-4f74-8a1a-b6957a95731b-kube-api-access-9gr6g\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.073884 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.109727 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2511bb97-1863-4f74-8a1a-b6957a95731b" (UID: "2511bb97-1863-4f74-8a1a-b6957a95731b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.174751 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2511bb97-1863-4f74-8a1a-b6957a95731b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.560351 4876 generic.go:334] "Generic (PLEG): container finished" podID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerID="6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548" exitCode=0 Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.560398 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerDied","Data":"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548"} Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.560440 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4wq9n" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.560475 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4wq9n" event={"ID":"2511bb97-1863-4f74-8a1a-b6957a95731b","Type":"ContainerDied","Data":"083366d946876e523802c731f79520600b939e503272c48991504b148f5b649d"} Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.560501 4876 scope.go:117] "RemoveContainer" containerID="6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.579636 4876 scope.go:117] "RemoveContainer" containerID="4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.603753 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.607238 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4wq9n"] Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.616289 4876 scope.go:117] "RemoveContainer" containerID="127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.630977 4876 scope.go:117] "RemoveContainer" containerID="6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548" Dec 15 07:05:45 crc kubenswrapper[4876]: E1215 07:05:45.631447 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548\": container with ID starting with 6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548 not found: ID does not exist" containerID="6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.631480 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548"} err="failed to get container status \"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548\": rpc error: code = NotFound desc = could not find container \"6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548\": container with ID starting with 6939783fde1f108618da0c2bb18f74df02fa16f1d48ddaa5b312bfd747e7d548 not found: ID does not exist" Dec 15 07:05:45 crc 
kubenswrapper[4876]: I1215 07:05:45.631505 4876 scope.go:117] "RemoveContainer" containerID="4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789" Dec 15 07:05:45 crc kubenswrapper[4876]: E1215 07:05:45.631946 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789\": container with ID starting with 4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789 not found: ID does not exist" containerID="4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.631980 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789"} err="failed to get container status \"4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789\": rpc error: code = NotFound desc = could not find container \"4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789\": container with ID starting with 4116d1cdc5b093faee68c93d487cc96798b4e8434d830ad0c389f1455cb5e789 not found: ID does not exist" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.632002 4876 scope.go:117] "RemoveContainer" containerID="127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08" Dec 15 07:05:45 crc kubenswrapper[4876]: E1215 07:05:45.632359 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08\": container with ID starting with 127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08 not found: ID does not exist" containerID="127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08" Dec 15 07:05:45 crc kubenswrapper[4876]: I1215 07:05:45.632389 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08"} err="failed to get container status \"127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08\": rpc error: code = NotFound desc = could not find container \"127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08\": container with ID starting with 127031b4a456d7d924cdd1a0a2a88615b675cc9bd3dabe843240faf4fe3e2d08 not found: ID does not exist" Dec 15 07:05:45 crc kubenswrapper[4876]: E1215 07:05:45.668855 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2511bb97_1863_4f74_8a1a_b6957a95731b.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2511bb97_1863_4f74_8a1a_b6957a95731b.slice/crio-083366d946876e523802c731f79520600b939e503272c48991504b148f5b649d\": RecentStats: unable to find data in memory cache]" Dec 15 07:05:46 crc kubenswrapper[4876]: I1215 07:05:46.711721 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" path="/var/lib/kubelet/pods/2511bb97-1863-4f74-8a1a-b6957a95731b/volumes" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192036 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-sww9s"] Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192262 4876 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="extract-utilities" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192277 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="extract-utilities" Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192291 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="extract-content" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192298 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="extract-content" Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192304 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="extract" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192311 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="extract" Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192320 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="pull" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192326 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="pull" Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192338 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="registry-server" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192346 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="registry-server" Dec 15 07:05:47 crc kubenswrapper[4876]: E1215 07:05:47.192357 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="util" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192364 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="util" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192472 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2511bb97-1863-4f74-8a1a-b6957a95731b" containerName="registry-server" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192486 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="870ffce7-be32-45bc-811e-39cf12bdef1f" containerName="extract" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.192893 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.194618 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-99qm7" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.195317 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.195426 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.205298 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-sww9s"] Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.301151 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5gtb\" (UniqueName: \"kubernetes.io/projected/cd43d415-d5f1-47dc-be4a-cf2a0a4c6053-kube-api-access-k5gtb\") pod \"nmstate-operator-6769fb99d-sww9s\" (UID: \"cd43d415-d5f1-47dc-be4a-cf2a0a4c6053\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.402642 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5gtb\" (UniqueName: \"kubernetes.io/projected/cd43d415-d5f1-47dc-be4a-cf2a0a4c6053-kube-api-access-k5gtb\") pod \"nmstate-operator-6769fb99d-sww9s\" (UID: \"cd43d415-d5f1-47dc-be4a-cf2a0a4c6053\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.422633 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5gtb\" (UniqueName: \"kubernetes.io/projected/cd43d415-d5f1-47dc-be4a-cf2a0a4c6053-kube-api-access-k5gtb\") pod \"nmstate-operator-6769fb99d-sww9s\" (UID: \"cd43d415-d5f1-47dc-be4a-cf2a0a4c6053\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.507717 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" Dec 15 07:05:47 crc kubenswrapper[4876]: I1215 07:05:47.733815 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-sww9s"] Dec 15 07:05:48 crc kubenswrapper[4876]: I1215 07:05:48.577622 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" event={"ID":"cd43d415-d5f1-47dc-be4a-cf2a0a4c6053","Type":"ContainerStarted","Data":"8ca7b4246dc716587e9c763b32197e44036711e49e9b54a1655a39968ac4bf15"} Dec 15 07:05:50 crc kubenswrapper[4876]: I1215 07:05:50.596512 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" event={"ID":"cd43d415-d5f1-47dc-be4a-cf2a0a4c6053","Type":"ContainerStarted","Data":"93faff1c3d061fb763f0a61501cdd9df917a250ee1006e11d25e34f8529f79e4"} Dec 15 07:05:50 crc kubenswrapper[4876]: I1215 07:05:50.614271 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-sww9s" podStartSLOduration=1.38544045 podStartE2EDuration="3.614246761s" podCreationTimestamp="2025-12-15 07:05:47 +0000 UTC" firstStartedPulling="2025-12-15 07:05:47.744966127 +0000 UTC m=+873.316109048" lastFinishedPulling="2025-12-15 07:05:49.973772448 +0000 UTC m=+875.544915359" observedRunningTime="2025-12-15 07:05:50.61354133 +0000 UTC m=+876.184684271" watchObservedRunningTime="2025-12-15 07:05:50.614246761 +0000 UTC m=+876.185389692" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.726570 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.728266 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.729690 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hdnrl" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.741362 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.771258 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-hxr68"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.773094 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.776957 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.787041 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-zhr5r"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.787942 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.793305 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-hxr68"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.817042 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-dbus-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.817504 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/b9207aa3-de53-4d87-99fe-40c057111a80-kube-api-access-n4vbv\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.817601 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-ovs-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.818407 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/56060de2-24fe-4d23-8332-4af3fe7c4c74-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: \"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.818528 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkg5x\" (UniqueName: \"kubernetes.io/projected/56060de2-24fe-4d23-8332-4af3fe7c4c74-kube-api-access-vkg5x\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: \"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.818656 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-nmstate-lock\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.818746 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl2x9\" (UniqueName: \"kubernetes.io/projected/e3bf6a5a-3f30-406f-87d7-f01388bb4210-kube-api-access-rl2x9\") pod \"nmstate-metrics-7f7f7578db-r88rl\" (UID: \"e3bf6a5a-3f30-406f-87d7-f01388bb4210\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.858029 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.858702 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.860942 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.861093 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fwlfq" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.862194 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.872458 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx"] Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919241 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c28e7727-e87a-4dd3-ac79-1d737ca00132-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919284 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-dbus-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919312 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/b9207aa3-de53-4d87-99fe-40c057111a80-kube-api-access-n4vbv\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919332 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2qmf\" (UniqueName: \"kubernetes.io/projected/c28e7727-e87a-4dd3-ac79-1d737ca00132-kube-api-access-j2qmf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-ovs-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919383 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/56060de2-24fe-4d23-8332-4af3fe7c4c74-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: \"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919403 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkg5x\" (UniqueName: \"kubernetes.io/projected/56060de2-24fe-4d23-8332-4af3fe7c4c74-kube-api-access-vkg5x\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: 
\"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919420 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-nmstate-lock\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919442 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl2x9\" (UniqueName: \"kubernetes.io/projected/e3bf6a5a-3f30-406f-87d7-f01388bb4210-kube-api-access-rl2x9\") pod \"nmstate-metrics-7f7f7578db-r88rl\" (UID: \"e3bf6a5a-3f30-406f-87d7-f01388bb4210\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919473 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919565 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-ovs-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.919948 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-dbus-socket\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.920086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b9207aa3-de53-4d87-99fe-40c057111a80-nmstate-lock\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.924727 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/56060de2-24fe-4d23-8332-4af3fe7c4c74-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: \"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.935953 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4vbv\" (UniqueName: \"kubernetes.io/projected/b9207aa3-de53-4d87-99fe-40c057111a80-kube-api-access-n4vbv\") pod \"nmstate-handler-zhr5r\" (UID: \"b9207aa3-de53-4d87-99fe-40c057111a80\") " pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.937147 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkg5x\" (UniqueName: \"kubernetes.io/projected/56060de2-24fe-4d23-8332-4af3fe7c4c74-kube-api-access-vkg5x\") pod \"nmstate-webhook-f8fb84555-hxr68\" (UID: 
\"56060de2-24fe-4d23-8332-4af3fe7c4c74\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:56 crc kubenswrapper[4876]: I1215 07:05:56.937863 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl2x9\" (UniqueName: \"kubernetes.io/projected/e3bf6a5a-3f30-406f-87d7-f01388bb4210-kube-api-access-rl2x9\") pod \"nmstate-metrics-7f7f7578db-r88rl\" (UID: \"e3bf6a5a-3f30-406f-87d7-f01388bb4210\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.020765 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2qmf\" (UniqueName: \"kubernetes.io/projected/c28e7727-e87a-4dd3-ac79-1d737ca00132-kube-api-access-j2qmf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.021165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.021307 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c28e7727-e87a-4dd3-ac79-1d737ca00132-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: E1215 07:05:57.021330 4876 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 15 07:05:57 crc kubenswrapper[4876]: E1215 07:05:57.021506 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert podName:c28e7727-e87a-4dd3-ac79-1d737ca00132 nodeName:}" failed. No retries permitted until 2025-12-15 07:05:57.521487685 +0000 UTC m=+883.092630596 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert") pod "nmstate-console-plugin-6ff7998486-8fmvx" (UID: "c28e7727-e87a-4dd3-ac79-1d737ca00132") : secret "plugin-serving-cert" not found Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.022312 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c28e7727-e87a-4dd3-ac79-1d737ca00132-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.037980 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-58845f6759-8kwxm"] Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.039029 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.043533 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.043927 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2qmf\" (UniqueName: \"kubernetes.io/projected/c28e7727-e87a-4dd3-ac79-1d737ca00132-kube-api-access-j2qmf\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.058988 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58845f6759-8kwxm"] Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.087764 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.104043 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.122778 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.122891 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-oauth-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.122945 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt6t9\" (UniqueName: \"kubernetes.io/projected/40807cf6-b7a3-4572-ae2c-2244838ef00e-kube-api-access-tt6t9\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.123079 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-service-ca\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.123140 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.123183 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-trusted-ca-bundle\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc 
kubenswrapper[4876]: I1215 07:05:57.123221 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-oauth-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224587 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-service-ca\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224632 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224674 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-trusted-ca-bundle\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224706 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-oauth-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224743 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224779 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-oauth-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.224802 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt6t9\" (UniqueName: \"kubernetes.io/projected/40807cf6-b7a3-4572-ae2c-2244838ef00e-kube-api-access-tt6t9\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.230059 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-service-ca\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc 
kubenswrapper[4876]: I1215 07:05:57.231741 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.231757 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-oauth-serving-cert\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.231770 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.232252 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40807cf6-b7a3-4572-ae2c-2244838ef00e-trusted-ca-bundle\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.233186 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/40807cf6-b7a3-4572-ae2c-2244838ef00e-console-oauth-config\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.244461 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt6t9\" (UniqueName: \"kubernetes.io/projected/40807cf6-b7a3-4572-ae2c-2244838ef00e-kube-api-access-tt6t9\") pod \"console-58845f6759-8kwxm\" (UID: \"40807cf6-b7a3-4572-ae2c-2244838ef00e\") " pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.322462 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.322774 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.410314 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.528762 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.534326 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c28e7727-e87a-4dd3-ac79-1d737ca00132-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-8fmvx\" (UID: \"c28e7727-e87a-4dd3-ac79-1d737ca00132\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.626385 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl"] Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.634472 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-hxr68"] Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.636821 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zhr5r" event={"ID":"b9207aa3-de53-4d87-99fe-40c057111a80","Type":"ContainerStarted","Data":"19d65e1e5ac0bdc541094614079127b49d6ea0d9543867c02ac0570afdd43c76"} Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.660303 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-58845f6759-8kwxm"] Dec 15 07:05:57 crc kubenswrapper[4876]: W1215 07:05:57.665164 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40807cf6_b7a3_4572_ae2c_2244838ef00e.slice/crio-c41b7c6b763ac26386e53153c479f8cae106cb07b12ef1fc8f8fc87f8a406059 WatchSource:0}: Error finding container c41b7c6b763ac26386e53153c479f8cae106cb07b12ef1fc8f8fc87f8a406059: Status 404 returned error can't find the container with id c41b7c6b763ac26386e53153c479f8cae106cb07b12ef1fc8f8fc87f8a406059 Dec 15 07:05:57 crc kubenswrapper[4876]: I1215 07:05:57.775352 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.178774 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx"] Dec 15 07:05:58 crc kubenswrapper[4876]: W1215 07:05:58.184254 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc28e7727_e87a_4dd3_ac79_1d737ca00132.slice/crio-5aad9b8d8d3fa788519ef259fd8a007c6a0b7247f538a70c5221bf923cd3a715 WatchSource:0}: Error finding container 5aad9b8d8d3fa788519ef259fd8a007c6a0b7247f538a70c5221bf923cd3a715: Status 404 returned error can't find the container with id 5aad9b8d8d3fa788519ef259fd8a007c6a0b7247f538a70c5221bf923cd3a715 Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.645853 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" event={"ID":"56060de2-24fe-4d23-8332-4af3fe7c4c74","Type":"ContainerStarted","Data":"0dce8e0ae84169ac727b0593c6511063d7ab0647e0b6fa2ccedf23e6fdc9f796"} Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.647218 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" event={"ID":"c28e7727-e87a-4dd3-ac79-1d737ca00132","Type":"ContainerStarted","Data":"5aad9b8d8d3fa788519ef259fd8a007c6a0b7247f538a70c5221bf923cd3a715"} Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.648435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58845f6759-8kwxm" event={"ID":"40807cf6-b7a3-4572-ae2c-2244838ef00e","Type":"ContainerStarted","Data":"7055fccfbeb2ff238ce3eab8f748fec24371aaa153f21243c0f69c6a7688fb6f"} Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.648467 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-58845f6759-8kwxm" event={"ID":"40807cf6-b7a3-4572-ae2c-2244838ef00e","Type":"ContainerStarted","Data":"c41b7c6b763ac26386e53153c479f8cae106cb07b12ef1fc8f8fc87f8a406059"} Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.650477 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" event={"ID":"e3bf6a5a-3f30-406f-87d7-f01388bb4210","Type":"ContainerStarted","Data":"2e92e87230b0b3e86637e592780f5e1a567bb054610e4d678ba5650f07fec3c4"} Dec 15 07:05:58 crc kubenswrapper[4876]: I1215 07:05:58.674185 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-58845f6759-8kwxm" podStartSLOduration=1.6741564759999998 podStartE2EDuration="1.674156476s" podCreationTimestamp="2025-12-15 07:05:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:05:58.668001458 +0000 UTC m=+884.239144369" watchObservedRunningTime="2025-12-15 07:05:58.674156476 +0000 UTC m=+884.245299387" Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.666545 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" event={"ID":"e3bf6a5a-3f30-406f-87d7-f01388bb4210","Type":"ContainerStarted","Data":"dde5c46c1b2429c9441ec58fe1e077c8aca770f3c4fba8010381af83db69aaaa"} Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.668905 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" 
event={"ID":"56060de2-24fe-4d23-8332-4af3fe7c4c74","Type":"ContainerStarted","Data":"051c15df15107392feb7c191b4869d2c61a8feb2ed22c6611ce4c5f2aa4b96ca"} Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.669019 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.672545 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" event={"ID":"c28e7727-e87a-4dd3-ac79-1d737ca00132","Type":"ContainerStarted","Data":"c037e489f0daf22ed87a052c4defb0b9df46e554466a5da362c847649734117e"} Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.675459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zhr5r" event={"ID":"b9207aa3-de53-4d87-99fe-40c057111a80","Type":"ContainerStarted","Data":"3a733fa614513d14f8c750f6618943fed8716fbb78e372dd4af140abcfa5146a"} Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.675593 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.693519 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" podStartSLOduration=2.784624295 podStartE2EDuration="4.693489709s" podCreationTimestamp="2025-12-15 07:05:56 +0000 UTC" firstStartedPulling="2025-12-15 07:05:57.649601568 +0000 UTC m=+883.220744479" lastFinishedPulling="2025-12-15 07:05:59.558466982 +0000 UTC m=+885.129609893" observedRunningTime="2025-12-15 07:06:00.689474002 +0000 UTC m=+886.260616933" watchObservedRunningTime="2025-12-15 07:06:00.693489709 +0000 UTC m=+886.264632620" Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.731008 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-zhr5r" podStartSLOduration=2.3232167 podStartE2EDuration="4.730983685s" podCreationTimestamp="2025-12-15 07:05:56 +0000 UTC" firstStartedPulling="2025-12-15 07:05:57.128300898 +0000 UTC m=+882.699443809" lastFinishedPulling="2025-12-15 07:05:59.536067883 +0000 UTC m=+885.107210794" observedRunningTime="2025-12-15 07:06:00.726578837 +0000 UTC m=+886.297721788" watchObservedRunningTime="2025-12-15 07:06:00.730983685 +0000 UTC m=+886.302126636" Dec 15 07:06:00 crc kubenswrapper[4876]: I1215 07:06:00.752148 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-8fmvx" podStartSLOduration=2.526592732 podStartE2EDuration="4.752124507s" podCreationTimestamp="2025-12-15 07:05:56 +0000 UTC" firstStartedPulling="2025-12-15 07:05:58.186653055 +0000 UTC m=+883.757795966" lastFinishedPulling="2025-12-15 07:06:00.41218483 +0000 UTC m=+885.983327741" observedRunningTime="2025-12-15 07:06:00.748696038 +0000 UTC m=+886.319838969" watchObservedRunningTime="2025-12-15 07:06:00.752124507 +0000 UTC m=+886.323267428" Dec 15 07:06:02 crc kubenswrapper[4876]: I1215 07:06:02.689289 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" event={"ID":"e3bf6a5a-3f30-406f-87d7-f01388bb4210","Type":"ContainerStarted","Data":"753e97de72123bf8a94771d6cd31a7c1e33587508a0e8cb4c2266ada7ac8842d"} Dec 15 07:06:02 crc kubenswrapper[4876]: I1215 07:06:02.723794 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-metrics-7f7f7578db-r88rl" podStartSLOduration=2.71777277 podStartE2EDuration="6.723763288s" podCreationTimestamp="2025-12-15 07:05:56 +0000 UTC" firstStartedPulling="2025-12-15 07:05:57.644700097 +0000 UTC m=+883.215843018" lastFinishedPulling="2025-12-15 07:06:01.650690625 +0000 UTC m=+887.221833536" observedRunningTime="2025-12-15 07:06:02.715903431 +0000 UTC m=+888.287046382" watchObservedRunningTime="2025-12-15 07:06:02.723763288 +0000 UTC m=+888.294906209" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.128934 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-zhr5r" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.411181 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.411377 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.417836 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.725973 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-58845f6759-8kwxm" Dec 15 07:06:07 crc kubenswrapper[4876]: I1215 07:06:07.785717 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.613808 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.616232 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.621732 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.741009 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.741434 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppps9\" (UniqueName: \"kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.742010 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.843710 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.843792 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.843822 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppps9\" (UniqueName: \"kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.844279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.844325 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.863183 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ppps9\" (UniqueName: \"kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9\") pod \"certified-operators-g8xs9\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:14 crc kubenswrapper[4876]: I1215 07:06:14.933483 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:15 crc kubenswrapper[4876]: I1215 07:06:15.441821 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:15 crc kubenswrapper[4876]: I1215 07:06:15.778711 4876 generic.go:334] "Generic (PLEG): container finished" podID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerID="3e54699e282505c8b65a1cc06951e2426faf909c76ef7bfa4162d637ad449bb9" exitCode=0 Dec 15 07:06:15 crc kubenswrapper[4876]: I1215 07:06:15.778832 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerDied","Data":"3e54699e282505c8b65a1cc06951e2426faf909c76ef7bfa4162d637ad449bb9"} Dec 15 07:06:15 crc kubenswrapper[4876]: I1215 07:06:15.780805 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerStarted","Data":"9048a4193673abaaf191d5197730f9d37daa090df66f11b3aec4bfd89f2dc8ec"} Dec 15 07:06:16 crc kubenswrapper[4876]: I1215 07:06:16.789029 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerStarted","Data":"25efdb4db763ecf5a2510e4b85ef13379b2c18a0dc9877d216543a5b90384227"} Dec 15 07:06:17 crc kubenswrapper[4876]: I1215 07:06:17.281446 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-hxr68" Dec 15 07:06:17 crc kubenswrapper[4876]: I1215 07:06:17.797800 4876 generic.go:334] "Generic (PLEG): container finished" podID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerID="25efdb4db763ecf5a2510e4b85ef13379b2c18a0dc9877d216543a5b90384227" exitCode=0 Dec 15 07:06:17 crc kubenswrapper[4876]: I1215 07:06:17.797839 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerDied","Data":"25efdb4db763ecf5a2510e4b85ef13379b2c18a0dc9877d216543a5b90384227"} Dec 15 07:06:18 crc kubenswrapper[4876]: I1215 07:06:18.823392 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerStarted","Data":"9b2dbf15c73447268549f2299c747721f37f70e5bb6547f74a72b2519ebe54bb"} Dec 15 07:06:18 crc kubenswrapper[4876]: I1215 07:06:18.847558 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g8xs9" podStartSLOduration=2.323464621 podStartE2EDuration="4.847516034s" podCreationTimestamp="2025-12-15 07:06:14 +0000 UTC" firstStartedPulling="2025-12-15 07:06:15.780933287 +0000 UTC m=+901.352076198" lastFinishedPulling="2025-12-15 07:06:18.30498469 +0000 UTC m=+903.876127611" observedRunningTime="2025-12-15 07:06:18.845383993 +0000 UTC m=+904.416526904" 
watchObservedRunningTime="2025-12-15 07:06:18.847516034 +0000 UTC m=+904.418658945" Dec 15 07:06:24 crc kubenswrapper[4876]: I1215 07:06:24.934296 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:24 crc kubenswrapper[4876]: I1215 07:06:24.934895 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:25 crc kubenswrapper[4876]: I1215 07:06:25.006178 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:25 crc kubenswrapper[4876]: I1215 07:06:25.916515 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:25 crc kubenswrapper[4876]: I1215 07:06:25.967208 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:27 crc kubenswrapper[4876]: I1215 07:06:27.322704 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:06:27 crc kubenswrapper[4876]: I1215 07:06:27.323345 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:06:27 crc kubenswrapper[4876]: I1215 07:06:27.878173 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g8xs9" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="registry-server" containerID="cri-o://9b2dbf15c73447268549f2299c747721f37f70e5bb6547f74a72b2519ebe54bb" gracePeriod=2 Dec 15 07:06:28 crc kubenswrapper[4876]: I1215 07:06:28.888148 4876 generic.go:334] "Generic (PLEG): container finished" podID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerID="9b2dbf15c73447268549f2299c747721f37f70e5bb6547f74a72b2519ebe54bb" exitCode=0 Dec 15 07:06:28 crc kubenswrapper[4876]: I1215 07:06:28.888218 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerDied","Data":"9b2dbf15c73447268549f2299c747721f37f70e5bb6547f74a72b2519ebe54bb"} Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.339621 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.371949 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6"] Dec 15 07:06:29 crc kubenswrapper[4876]: E1215 07:06:29.372198 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="registry-server" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.372212 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="registry-server" Dec 15 07:06:29 crc kubenswrapper[4876]: E1215 07:06:29.372232 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="extract-content" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.372240 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="extract-content" Dec 15 07:06:29 crc kubenswrapper[4876]: E1215 07:06:29.372261 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="extract-utilities" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.372268 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="extract-utilities" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.372382 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" containerName="registry-server" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.373281 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.386962 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.410553 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6"] Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.446759 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppps9\" (UniqueName: \"kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9\") pod \"722e3c10-ef59-4054-8938-1d0a3a0c0997\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.446832 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities\") pod \"722e3c10-ef59-4054-8938-1d0a3a0c0997\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.446988 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content\") pod \"722e3c10-ef59-4054-8938-1d0a3a0c0997\" (UID: \"722e3c10-ef59-4054-8938-1d0a3a0c0997\") " Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.447416 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.447474 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlgdv\" (UniqueName: \"kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.447508 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.448533 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities" (OuterVolumeSpecName: "utilities") pod "722e3c10-ef59-4054-8938-1d0a3a0c0997" (UID: "722e3c10-ef59-4054-8938-1d0a3a0c0997"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.453787 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9" (OuterVolumeSpecName: "kube-api-access-ppps9") pod "722e3c10-ef59-4054-8938-1d0a3a0c0997" (UID: "722e3c10-ef59-4054-8938-1d0a3a0c0997"). InnerVolumeSpecName "kube-api-access-ppps9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.537961 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "722e3c10-ef59-4054-8938-1d0a3a0c0997" (UID: "722e3c10-ef59-4054-8938-1d0a3a0c0997"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548608 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548724 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548769 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlgdv\" (UniqueName: \"kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548819 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppps9\" (UniqueName: \"kubernetes.io/projected/722e3c10-ef59-4054-8938-1d0a3a0c0997-kube-api-access-ppps9\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548833 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.548843 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/722e3c10-ef59-4054-8938-1d0a3a0c0997-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.549266 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " 
pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.549285 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.573438 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlgdv\" (UniqueName: \"kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.686504 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.868034 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6"] Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.894211 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" event={"ID":"eaddc445-5da1-4790-8af0-37a4ea5f3350","Type":"ContainerStarted","Data":"5e9aafee6f128eb1de8c9d1ddd47d04207dacabbee0ea44610b4cfa5ee7adeca"} Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.896568 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g8xs9" event={"ID":"722e3c10-ef59-4054-8938-1d0a3a0c0997","Type":"ContainerDied","Data":"9048a4193673abaaf191d5197730f9d37daa090df66f11b3aec4bfd89f2dc8ec"} Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.896606 4876 scope.go:117] "RemoveContainer" containerID="9b2dbf15c73447268549f2299c747721f37f70e5bb6547f74a72b2519ebe54bb" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.896739 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g8xs9" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.918078 4876 scope.go:117] "RemoveContainer" containerID="25efdb4db763ecf5a2510e4b85ef13379b2c18a0dc9877d216543a5b90384227" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.936473 4876 scope.go:117] "RemoveContainer" containerID="3e54699e282505c8b65a1cc06951e2426faf909c76ef7bfa4162d637ad449bb9" Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.941470 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:29 crc kubenswrapper[4876]: I1215 07:06:29.948020 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g8xs9"] Dec 15 07:06:30 crc kubenswrapper[4876]: I1215 07:06:30.714160 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="722e3c10-ef59-4054-8938-1d0a3a0c0997" path="/var/lib/kubelet/pods/722e3c10-ef59-4054-8938-1d0a3a0c0997/volumes" Dec 15 07:06:30 crc kubenswrapper[4876]: I1215 07:06:30.904926 4876 generic.go:334] "Generic (PLEG): container finished" podID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerID="bc8d9ba58558523cc71102ca3a3a51fd64333e48cd890978c73a683569e9b6b0" exitCode=0 Dec 15 07:06:30 crc kubenswrapper[4876]: I1215 07:06:30.904972 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" event={"ID":"eaddc445-5da1-4790-8af0-37a4ea5f3350","Type":"ContainerDied","Data":"bc8d9ba58558523cc71102ca3a3a51fd64333e48cd890978c73a683569e9b6b0"} Dec 15 07:06:32 crc kubenswrapper[4876]: I1215 07:06:32.830520 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-x6r8c" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" containerID="cri-o://4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6" gracePeriod=15 Dec 15 07:06:32 crc kubenswrapper[4876]: I1215 07:06:32.921426 4876 generic.go:334] "Generic (PLEG): container finished" podID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerID="83e77ba9506ae39cb6ef1a1b60d0d334806ff685e5c0db4ef85ff8046c19d6c2" exitCode=0 Dec 15 07:06:32 crc kubenswrapper[4876]: I1215 07:06:32.921479 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" event={"ID":"eaddc445-5da1-4790-8af0-37a4ea5f3350","Type":"ContainerDied","Data":"83e77ba9506ae39cb6ef1a1b60d0d334806ff685e5c0db4ef85ff8046c19d6c2"} Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.171417 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x6r8c_4771573b-f753-4de4-bfb5-7fe3608a0b53/console/0.log" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.171489 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.198751 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199050 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199165 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199223 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199325 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbvsx\" (UniqueName: \"kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199355 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199382 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config\") pod \"4771573b-f753-4de4-bfb5-7fe3608a0b53\" (UID: \"4771573b-f753-4de4-bfb5-7fe3608a0b53\") " Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199463 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca" (OuterVolumeSpecName: "service-ca") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199514 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config" (OuterVolumeSpecName: "console-config") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199814 4876 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-service-ca\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199837 4876 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.199872 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.200222 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.204609 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.205702 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx" (OuterVolumeSpecName: "kube-api-access-hbvsx") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "kube-api-access-hbvsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.206079 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "4771573b-f753-4de4-bfb5-7fe3608a0b53" (UID: "4771573b-f753-4de4-bfb5-7fe3608a0b53"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.301320 4876 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.301594 4876 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.301788 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbvsx\" (UniqueName: \"kubernetes.io/projected/4771573b-f753-4de4-bfb5-7fe3608a0b53-kube-api-access-hbvsx\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.301906 4876 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4771573b-f753-4de4-bfb5-7fe3608a0b53-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.301978 4876 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4771573b-f753-4de4-bfb5-7fe3608a0b53-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.930442 4876 generic.go:334] "Generic (PLEG): container finished" podID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerID="072d95c278ec99bf809d3f2ccc74e6cdd4614afca1a549ec7e94776a9bbe46ea" exitCode=0 Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.930520 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" event={"ID":"eaddc445-5da1-4790-8af0-37a4ea5f3350","Type":"ContainerDied","Data":"072d95c278ec99bf809d3f2ccc74e6cdd4614afca1a549ec7e94776a9bbe46ea"} Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.932958 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-x6r8c_4771573b-f753-4de4-bfb5-7fe3608a0b53/console/0.log" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.933011 4876 generic.go:334] "Generic (PLEG): container finished" podID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerID="4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6" exitCode=2 Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.933042 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x6r8c" event={"ID":"4771573b-f753-4de4-bfb5-7fe3608a0b53","Type":"ContainerDied","Data":"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6"} Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.933075 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-x6r8c" event={"ID":"4771573b-f753-4de4-bfb5-7fe3608a0b53","Type":"ContainerDied","Data":"d0cb08802b7a56540e1a9f04d0e1677eea87ca0a806d778deda1039202b455a2"} Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.933132 4876 scope.go:117] "RemoveContainer" containerID="4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.933130 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-x6r8c" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.948609 4876 scope.go:117] "RemoveContainer" containerID="4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6" Dec 15 07:06:33 crc kubenswrapper[4876]: E1215 07:06:33.949234 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6\": container with ID starting with 4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6 not found: ID does not exist" containerID="4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.949280 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6"} err="failed to get container status \"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6\": rpc error: code = NotFound desc = could not find container \"4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6\": container with ID starting with 4634425c322b695ed83fcf8e186cd2f3c3e1ae34059dc09e83423d3e869df1e6 not found: ID does not exist" Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.969656 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 07:06:33 crc kubenswrapper[4876]: I1215 07:06:33.973352 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-x6r8c"] Dec 15 07:06:34 crc kubenswrapper[4876]: I1215 07:06:34.712975 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" path="/var/lib/kubelet/pods/4771573b-f753-4de4-bfb5-7fe3608a0b53/volumes" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.149340 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.228212 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlgdv\" (UniqueName: \"kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv\") pod \"eaddc445-5da1-4790-8af0-37a4ea5f3350\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.228264 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle\") pod \"eaddc445-5da1-4790-8af0-37a4ea5f3350\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.228331 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util\") pod \"eaddc445-5da1-4790-8af0-37a4ea5f3350\" (UID: \"eaddc445-5da1-4790-8af0-37a4ea5f3350\") " Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.229219 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle" (OuterVolumeSpecName: "bundle") pod "eaddc445-5da1-4790-8af0-37a4ea5f3350" (UID: "eaddc445-5da1-4790-8af0-37a4ea5f3350"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.232480 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv" (OuterVolumeSpecName: "kube-api-access-xlgdv") pod "eaddc445-5da1-4790-8af0-37a4ea5f3350" (UID: "eaddc445-5da1-4790-8af0-37a4ea5f3350"). InnerVolumeSpecName "kube-api-access-xlgdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.245802 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util" (OuterVolumeSpecName: "util") pod "eaddc445-5da1-4790-8af0-37a4ea5f3350" (UID: "eaddc445-5da1-4790-8af0-37a4ea5f3350"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.329892 4876 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-util\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.330209 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlgdv\" (UniqueName: \"kubernetes.io/projected/eaddc445-5da1-4790-8af0-37a4ea5f3350-kube-api-access-xlgdv\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.330271 4876 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/eaddc445-5da1-4790-8af0-37a4ea5f3350-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.953822 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" event={"ID":"eaddc445-5da1-4790-8af0-37a4ea5f3350","Type":"ContainerDied","Data":"5e9aafee6f128eb1de8c9d1ddd47d04207dacabbee0ea44610b4cfa5ee7adeca"} Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.953879 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e9aafee6f128eb1de8c9d1ddd47d04207dacabbee0ea44610b4cfa5ee7adeca" Dec 15 07:06:35 crc kubenswrapper[4876]: I1215 07:06:35.953969 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.675693 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:37 crc kubenswrapper[4876]: E1215 07:06:37.676247 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="pull" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676264 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="pull" Dec 15 07:06:37 crc kubenswrapper[4876]: E1215 07:06:37.676282 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676289 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" Dec 15 07:06:37 crc kubenswrapper[4876]: E1215 07:06:37.676299 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="util" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676307 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="util" Dec 15 07:06:37 crc kubenswrapper[4876]: E1215 07:06:37.676325 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="extract" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676333 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="extract" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676425 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4771573b-f753-4de4-bfb5-7fe3608a0b53" containerName="console" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.676441 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaddc445-5da1-4790-8af0-37a4ea5f3350" containerName="extract" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.677202 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.691849 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.792932 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.793008 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.793078 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pskxr\" (UniqueName: \"kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.893801 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pskxr\" (UniqueName: \"kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.894071 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.894167 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.894651 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.894758 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.915704 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pskxr\" (UniqueName: \"kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr\") pod \"community-operators-hmcqg\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:37 crc kubenswrapper[4876]: I1215 07:06:37.992681 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:38 crc kubenswrapper[4876]: I1215 07:06:38.219650 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:39 crc kubenswrapper[4876]: I1215 07:06:39.064021 4876 generic.go:334] "Generic (PLEG): container finished" podID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerID="d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a" exitCode=0 Dec 15 07:06:39 crc kubenswrapper[4876]: I1215 07:06:39.064144 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerDied","Data":"d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a"} Dec 15 07:06:39 crc kubenswrapper[4876]: I1215 07:06:39.064399 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerStarted","Data":"d058b4f4b862feafb50e2f6fc0c254ae3a8028e4a9fd9a26cc3aac5ab7edb742"} Dec 15 07:06:41 crc kubenswrapper[4876]: I1215 07:06:41.095478 4876 generic.go:334] "Generic (PLEG): container finished" podID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerID="830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857" exitCode=0 Dec 15 07:06:41 crc kubenswrapper[4876]: I1215 07:06:41.095569 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerDied","Data":"830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857"} Dec 15 07:06:42 crc kubenswrapper[4876]: I1215 07:06:42.103123 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerStarted","Data":"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3"} Dec 15 07:06:42 crc kubenswrapper[4876]: I1215 07:06:42.120468 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hmcqg" podStartSLOduration=2.685920979 podStartE2EDuration="5.120447253s" podCreationTimestamp="2025-12-15 07:06:37 +0000 UTC" firstStartedPulling="2025-12-15 07:06:39.066176675 +0000 UTC m=+924.637319586" lastFinishedPulling="2025-12-15 07:06:41.500702949 +0000 UTC m=+927.071845860" observedRunningTime="2025-12-15 07:06:42.12032946 +0000 UTC m=+927.691472371" watchObservedRunningTime="2025-12-15 07:06:42.120447253 +0000 UTC m=+927.691590174" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.976851 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn"] Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.978049 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.983398 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-5vfxl" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.983398 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.983403 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.984115 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 15 07:06:43 crc kubenswrapper[4876]: I1215 07:06:43.997027 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.014293 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn"] Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.115885 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-webhook-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.115967 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p54l\" (UniqueName: \"kubernetes.io/projected/e94fa05e-79a3-43ac-94c5-55fe38c57a17-kube-api-access-9p54l\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.116466 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-apiservice-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.217968 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p54l\" (UniqueName: \"kubernetes.io/projected/e94fa05e-79a3-43ac-94c5-55fe38c57a17-kube-api-access-9p54l\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.218084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-apiservice-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.218149 
4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-webhook-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.225829 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-webhook-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.232844 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns"] Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.234073 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.238289 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.238539 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.238646 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5z6df" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.240480 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e94fa05e-79a3-43ac-94c5-55fe38c57a17-apiservice-cert\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.322360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p54l\" (UniqueName: \"kubernetes.io/projected/e94fa05e-79a3-43ac-94c5-55fe38c57a17-kube-api-access-9p54l\") pod \"metallb-operator-controller-manager-79c8fdf8b5-hxccn\" (UID: \"e94fa05e-79a3-43ac-94c5-55fe38c57a17\") " pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.328083 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-apiservice-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.328160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-webhook-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: 
I1215 07:06:44.328188 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8h9f\" (UniqueName: \"kubernetes.io/projected/68e442ac-dc60-4346-a861-102a4dcc5c26-kube-api-access-t8h9f\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.368584 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns"] Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.429636 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-apiservice-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.429702 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-webhook-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.429729 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8h9f\" (UniqueName: \"kubernetes.io/projected/68e442ac-dc60-4346-a861-102a4dcc5c26-kube-api-access-t8h9f\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.433215 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-webhook-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.433711 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/68e442ac-dc60-4346-a861-102a4dcc5c26-apiservice-cert\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.476471 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8h9f\" (UniqueName: \"kubernetes.io/projected/68e442ac-dc60-4346-a861-102a4dcc5c26-kube-api-access-t8h9f\") pod \"metallb-operator-webhook-server-6687c58c98-v52ns\" (UID: \"68e442ac-dc60-4346-a861-102a4dcc5c26\") " pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.579312 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.593464 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:44 crc kubenswrapper[4876]: I1215 07:06:44.850379 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn"] Dec 15 07:06:45 crc kubenswrapper[4876]: I1215 07:06:45.114197 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns"] Dec 15 07:06:45 crc kubenswrapper[4876]: W1215 07:06:45.120539 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68e442ac_dc60_4346_a861_102a4dcc5c26.slice/crio-14afa9bc28bef551780339cc799ffa0f0cc14c0642841ab5e1ce4776f73f4a4d WatchSource:0}: Error finding container 14afa9bc28bef551780339cc799ffa0f0cc14c0642841ab5e1ce4776f73f4a4d: Status 404 returned error can't find the container with id 14afa9bc28bef551780339cc799ffa0f0cc14c0642841ab5e1ce4776f73f4a4d Dec 15 07:06:45 crc kubenswrapper[4876]: I1215 07:06:45.121312 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" event={"ID":"e94fa05e-79a3-43ac-94c5-55fe38c57a17","Type":"ContainerStarted","Data":"94ce28616b3ef2cf882f2251d46bdb23c942715c381cc69ea4b2f51548c2abe6"} Dec 15 07:06:46 crc kubenswrapper[4876]: I1215 07:06:46.128087 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" event={"ID":"68e442ac-dc60-4346-a861-102a4dcc5c26","Type":"ContainerStarted","Data":"14afa9bc28bef551780339cc799ffa0f0cc14c0642841ab5e1ce4776f73f4a4d"} Dec 15 07:06:48 crc kubenswrapper[4876]: I1215 07:06:48.004559 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:48 crc kubenswrapper[4876]: I1215 07:06:48.007753 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:48 crc kubenswrapper[4876]: I1215 07:06:48.070734 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:48 crc kubenswrapper[4876]: I1215 07:06:48.192158 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:49 crc kubenswrapper[4876]: I1215 07:06:49.151920 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" event={"ID":"e94fa05e-79a3-43ac-94c5-55fe38c57a17","Type":"ContainerStarted","Data":"d7c39ce45948f4daa836b399576f9026f51d2bfbce8f14e0f8aa6809b666149f"} Dec 15 07:06:49 crc kubenswrapper[4876]: I1215 07:06:49.152241 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:06:49 crc kubenswrapper[4876]: I1215 07:06:49.183284 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" podStartSLOduration=2.822963988 podStartE2EDuration="6.183264943s" podCreationTimestamp="2025-12-15 07:06:43 +0000 UTC" firstStartedPulling="2025-12-15 07:06:44.867323108 +0000 UTC m=+930.438466019" lastFinishedPulling="2025-12-15 07:06:48.227624063 +0000 UTC m=+933.798766974" observedRunningTime="2025-12-15 
07:06:49.173912047 +0000 UTC m=+934.745054998" watchObservedRunningTime="2025-12-15 07:06:49.183264943 +0000 UTC m=+934.754407854" Dec 15 07:06:50 crc kubenswrapper[4876]: I1215 07:06:50.270146 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.162360 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hmcqg" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="registry-server" containerID="cri-o://da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3" gracePeriod=2 Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.704047 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.735125 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities\") pod \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.735658 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pskxr\" (UniqueName: \"kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr\") pod \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.735689 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content\") pod \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\" (UID: \"31c9f1e6-ef1c-4d47-8171-c2b36143bd84\") " Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.736063 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities" (OuterVolumeSpecName: "utilities") pod "31c9f1e6-ef1c-4d47-8171-c2b36143bd84" (UID: "31c9f1e6-ef1c-4d47-8171-c2b36143bd84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.742723 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr" (OuterVolumeSpecName: "kube-api-access-pskxr") pod "31c9f1e6-ef1c-4d47-8171-c2b36143bd84" (UID: "31c9f1e6-ef1c-4d47-8171-c2b36143bd84"). InnerVolumeSpecName "kube-api-access-pskxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.782734 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31c9f1e6-ef1c-4d47-8171-c2b36143bd84" (UID: "31c9f1e6-ef1c-4d47-8171-c2b36143bd84"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.836924 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.836976 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:51 crc kubenswrapper[4876]: I1215 07:06:51.836991 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pskxr\" (UniqueName: \"kubernetes.io/projected/31c9f1e6-ef1c-4d47-8171-c2b36143bd84-kube-api-access-pskxr\") on node \"crc\" DevicePath \"\"" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.168740 4876 generic.go:334] "Generic (PLEG): container finished" podID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerID="da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3" exitCode=0 Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.168790 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hmcqg" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.168803 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerDied","Data":"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3"} Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.168828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hmcqg" event={"ID":"31c9f1e6-ef1c-4d47-8171-c2b36143bd84","Type":"ContainerDied","Data":"d058b4f4b862feafb50e2f6fc0c254ae3a8028e4a9fd9a26cc3aac5ab7edb742"} Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.168854 4876 scope.go:117] "RemoveContainer" containerID="da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.170361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" event={"ID":"68e442ac-dc60-4346-a861-102a4dcc5c26","Type":"ContainerStarted","Data":"4969cb7af8e0fc84beeff99c83316a31fd8e9b8c571be654f8201b6b74ec1a60"} Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.170941 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.185468 4876 scope.go:117] "RemoveContainer" containerID="830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.203203 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" podStartSLOduration=1.832054688 podStartE2EDuration="8.203179068s" podCreationTimestamp="2025-12-15 07:06:44 +0000 UTC" firstStartedPulling="2025-12-15 07:06:45.123741191 +0000 UTC m=+930.694884102" lastFinishedPulling="2025-12-15 07:06:51.494865571 +0000 UTC m=+937.066008482" observedRunningTime="2025-12-15 07:06:52.196066383 +0000 UTC m=+937.767209294" watchObservedRunningTime="2025-12-15 07:06:52.203179068 +0000 UTC m=+937.774321989" Dec 15 07:06:52 crc kubenswrapper[4876]: 
I1215 07:06:52.212154 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.215684 4876 scope.go:117] "RemoveContainer" containerID="d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.218595 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hmcqg"] Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.229379 4876 scope.go:117] "RemoveContainer" containerID="da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3" Dec 15 07:06:52 crc kubenswrapper[4876]: E1215 07:06:52.229856 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3\": container with ID starting with da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3 not found: ID does not exist" containerID="da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.229901 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3"} err="failed to get container status \"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3\": rpc error: code = NotFound desc = could not find container \"da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3\": container with ID starting with da9ea60c33017fc9f43ae5d679e9936bd4bd1b1d3f95bcc4538ba035f89d68e3 not found: ID does not exist" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.229932 4876 scope.go:117] "RemoveContainer" containerID="830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857" Dec 15 07:06:52 crc kubenswrapper[4876]: E1215 07:06:52.230453 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857\": container with ID starting with 830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857 not found: ID does not exist" containerID="830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.230503 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857"} err="failed to get container status \"830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857\": rpc error: code = NotFound desc = could not find container \"830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857\": container with ID starting with 830e24a1310cf7fffedc6366b6b11f0ab73c7f4716476fa9f97f2ab4897dc857 not found: ID does not exist" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.230546 4876 scope.go:117] "RemoveContainer" containerID="d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a" Dec 15 07:06:52 crc kubenswrapper[4876]: E1215 07:06:52.230874 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a\": container with ID starting with d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a not found: ID does not exist" 
containerID="d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.230908 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a"} err="failed to get container status \"d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a\": rpc error: code = NotFound desc = could not find container \"d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a\": container with ID starting with d0946d9104affe41c85566632f5e835f7749f73866fd999515517f32778bac4a not found: ID does not exist" Dec 15 07:06:52 crc kubenswrapper[4876]: I1215 07:06:52.722314 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" path="/var/lib/kubelet/pods/31c9f1e6-ef1c-4d47-8171-c2b36143bd84/volumes" Dec 15 07:06:57 crc kubenswrapper[4876]: I1215 07:06:57.322778 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:06:57 crc kubenswrapper[4876]: I1215 07:06:57.323196 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:06:57 crc kubenswrapper[4876]: I1215 07:06:57.323269 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:06:57 crc kubenswrapper[4876]: I1215 07:06:57.324141 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:06:57 crc kubenswrapper[4876]: I1215 07:06:57.324242 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df" gracePeriod=600 Dec 15 07:06:58 crc kubenswrapper[4876]: I1215 07:06:58.217833 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df" exitCode=0 Dec 15 07:06:58 crc kubenswrapper[4876]: I1215 07:06:58.217906 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df"} Dec 15 07:06:58 crc kubenswrapper[4876]: I1215 07:06:58.218494 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2"} Dec 15 07:06:58 crc kubenswrapper[4876]: I1215 07:06:58.218518 4876 scope.go:117] "RemoveContainer" containerID="0acc14dfc2de181ac94b53d2a5a6aa702465dae0249f92111bf270d15ac0837a" Dec 15 07:07:04 crc kubenswrapper[4876]: I1215 07:07:04.586860 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6687c58c98-v52ns" Dec 15 07:07:24 crc kubenswrapper[4876]: I1215 07:07:24.596619 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-79c8fdf8b5-hxccn" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.317888 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-hmmjw"] Dec 15 07:07:25 crc kubenswrapper[4876]: E1215 07:07:25.318126 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="registry-server" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.318142 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="registry-server" Dec 15 07:07:25 crc kubenswrapper[4876]: E1215 07:07:25.318155 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="extract-content" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.318161 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="extract-content" Dec 15 07:07:25 crc kubenswrapper[4876]: E1215 07:07:25.318170 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="extract-utilities" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.318176 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="extract-utilities" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.318281 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="31c9f1e6-ef1c-4d47-8171-c2b36143bd84" containerName="registry-server" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.319970 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.322328 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.326037 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2"] Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.326852 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.327746 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-gq8sg" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.327922 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.327955 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.350976 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2"] Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.400313 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-c5kqb"] Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.401223 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.405709 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-rx9l6" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.405804 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.406501 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.406706 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.409907 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-9r84l"] Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.410845 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.418423 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.421500 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-9r84l"] Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457595 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-reloader\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457645 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-startup\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457684 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdzfl\" (UniqueName: \"kubernetes.io/projected/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-kube-api-access-vdzfl\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457708 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-cert\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457896 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8ljq\" (UniqueName: \"kubernetes.io/projected/cfac572a-3220-4297-9706-d8a681d42852-kube-api-access-k8ljq\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457952 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics-certs\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.457979 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metrics-certs\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 
07:07:25.458060 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458095 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpfgw\" (UniqueName: \"kubernetes.io/projected/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-kube-api-access-tpfgw\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458143 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458171 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x247\" (UniqueName: \"kubernetes.io/projected/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-kube-api-access-4x247\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458191 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-metrics-certs\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458578 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metallb-excludel2\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-conf\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.458662 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-sockets\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.559523 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.559576 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpfgw\" (UniqueName: \"kubernetes.io/projected/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-kube-api-access-tpfgw\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.559596 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.559615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x247\" (UniqueName: \"kubernetes.io/projected/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-kube-api-access-4x247\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560454 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-metrics-certs\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560502 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-conf\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560509 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560519 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metallb-excludel2\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560616 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-sockets\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560650 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-reloader\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560697 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-startup\") pod 
\"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560718 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560743 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdzfl\" (UniqueName: \"kubernetes.io/projected/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-kube-api-access-vdzfl\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560781 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-cert\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8ljq\" (UniqueName: \"kubernetes.io/projected/cfac572a-3220-4297-9706-d8a681d42852-kube-api-access-k8ljq\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560850 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics-certs\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.560866 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metrics-certs\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: E1215 07:07:25.561016 4876 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 15 07:07:25 crc kubenswrapper[4876]: E1215 07:07:25.561073 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist podName:cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24 nodeName:}" failed. No retries permitted until 2025-12-15 07:07:26.061057355 +0000 UTC m=+971.632200266 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist") pod "speaker-c5kqb" (UID: "cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24") : secret "metallb-memberlist" not found Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.561092 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metallb-excludel2\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.561420 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-conf\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.561448 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-sockets\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.562070 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-frr-startup\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.564355 4876 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.565608 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-metrics-certs\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.565751 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-reloader\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.570725 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.576712 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-metrics-certs\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.578231 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-metrics-certs\") pod \"frr-k8s-hmmjw\" (UID: 
\"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.580090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfac572a-3220-4297-9706-d8a681d42852-cert\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.591854 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdzfl\" (UniqueName: \"kubernetes.io/projected/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-kube-api-access-vdzfl\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.592125 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8ljq\" (UniqueName: \"kubernetes.io/projected/cfac572a-3220-4297-9706-d8a681d42852-kube-api-access-k8ljq\") pod \"controller-5bddd4b946-9r84l\" (UID: \"cfac572a-3220-4297-9706-d8a681d42852\") " pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.593059 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x247\" (UniqueName: \"kubernetes.io/projected/aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b-kube-api-access-4x247\") pod \"frr-k8s-hmmjw\" (UID: \"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b\") " pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.618910 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpfgw\" (UniqueName: \"kubernetes.io/projected/704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0-kube-api-access-tpfgw\") pod \"frr-k8s-webhook-server-7784b6fcf-rjzg2\" (UID: \"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.641974 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.651904 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.727308 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:25 crc kubenswrapper[4876]: I1215 07:07:25.971066 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2"] Dec 15 07:07:26 crc kubenswrapper[4876]: I1215 07:07:26.070084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:26 crc kubenswrapper[4876]: E1215 07:07:26.070270 4876 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 15 07:07:26 crc kubenswrapper[4876]: E1215 07:07:26.070336 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist podName:cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24 nodeName:}" failed. 
No retries permitted until 2025-12-15 07:07:27.070319023 +0000 UTC m=+972.641461934 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist") pod "speaker-c5kqb" (UID: "cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24") : secret "metallb-memberlist" not found Dec 15 07:07:26 crc kubenswrapper[4876]: I1215 07:07:26.258916 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-9r84l"] Dec 15 07:07:26 crc kubenswrapper[4876]: W1215 07:07:26.270726 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfac572a_3220_4297_9706_d8a681d42852.slice/crio-929d8b807947e8181a7b295037026d3c8fb7f6c0485e867f42157a148aa48600 WatchSource:0}: Error finding container 929d8b807947e8181a7b295037026d3c8fb7f6c0485e867f42157a148aa48600: Status 404 returned error can't find the container with id 929d8b807947e8181a7b295037026d3c8fb7f6c0485e867f42157a148aa48600 Dec 15 07:07:26 crc kubenswrapper[4876]: I1215 07:07:26.374218 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-9r84l" event={"ID":"cfac572a-3220-4297-9706-d8a681d42852","Type":"ContainerStarted","Data":"929d8b807947e8181a7b295037026d3c8fb7f6c0485e867f42157a148aa48600"} Dec 15 07:07:26 crc kubenswrapper[4876]: I1215 07:07:26.375528 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" event={"ID":"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0","Type":"ContainerStarted","Data":"406ba37a96db3009e8780620855f25071e6f18febb872b794ec23859bca8d207"} Dec 15 07:07:26 crc kubenswrapper[4876]: I1215 07:07:26.377296 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"af443994b184424e954fd017f2a17cd75fc01c2623c556cf6c86e7323c8aa42f"} Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.082159 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.090892 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24-memberlist\") pod \"speaker-c5kqb\" (UID: \"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24\") " pod="metallb-system/speaker-c5kqb" Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.215366 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-c5kqb" Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.385550 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-9r84l" event={"ID":"cfac572a-3220-4297-9706-d8a681d42852","Type":"ContainerStarted","Data":"02f7ccac323041d28394cd5c95bfe03f18694de669d9ecd1ad1a835c605da995"} Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.385602 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-9r84l" event={"ID":"cfac572a-3220-4297-9706-d8a681d42852","Type":"ContainerStarted","Data":"be2b225ea55901de97761462f22cf038b828f4fa03b2dc9e7607b00e7f8ede73"} Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.385679 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.386758 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-c5kqb" event={"ID":"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24","Type":"ContainerStarted","Data":"949a2d551654a34f02d60009da71606d1f158116d31c6f6e6a912cdbf1801aa4"} Dec 15 07:07:27 crc kubenswrapper[4876]: I1215 07:07:27.410179 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-9r84l" podStartSLOduration=2.410162498 podStartE2EDuration="2.410162498s" podCreationTimestamp="2025-12-15 07:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:07:27.399993479 +0000 UTC m=+972.971136410" watchObservedRunningTime="2025-12-15 07:07:27.410162498 +0000 UTC m=+972.981305399" Dec 15 07:07:28 crc kubenswrapper[4876]: I1215 07:07:28.402816 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-c5kqb" event={"ID":"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24","Type":"ContainerStarted","Data":"142ba778fb0c9d176d3a614693a94047cd88f6c992cdce5b78136bfd15a6475a"} Dec 15 07:07:28 crc kubenswrapper[4876]: I1215 07:07:28.404425 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-c5kqb" Dec 15 07:07:28 crc kubenswrapper[4876]: I1215 07:07:28.404528 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-c5kqb" event={"ID":"cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24","Type":"ContainerStarted","Data":"3a61acb15bd931744842eeb864c79f34621961b7f590d866cbbf288b99ba68c3"} Dec 15 07:07:28 crc kubenswrapper[4876]: I1215 07:07:28.423652 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-c5kqb" podStartSLOduration=3.423627606 podStartE2EDuration="3.423627606s" podCreationTimestamp="2025-12-15 07:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:07:28.422495363 +0000 UTC m=+973.993638274" watchObservedRunningTime="2025-12-15 07:07:28.423627606 +0000 UTC m=+973.994770517" Dec 15 07:07:34 crc kubenswrapper[4876]: I1215 07:07:34.449631 4876 generic.go:334] "Generic (PLEG): container finished" podID="aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b" containerID="7a536ef384f9e026f3f1e967b37b61db76028a40651d705577f3225913bd869b" exitCode=0 Dec 15 07:07:34 crc kubenswrapper[4876]: I1215 07:07:34.449737 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" 
event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerDied","Data":"7a536ef384f9e026f3f1e967b37b61db76028a40651d705577f3225913bd869b"} Dec 15 07:07:34 crc kubenswrapper[4876]: I1215 07:07:34.452582 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" event={"ID":"704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0","Type":"ContainerStarted","Data":"b9b51ebad09d243df289920e5530e932c6dc50d7c7f8739a973358fedf6b397e"} Dec 15 07:07:34 crc kubenswrapper[4876]: I1215 07:07:34.453239 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:34 crc kubenswrapper[4876]: I1215 07:07:34.498362 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" podStartSLOduration=1.222118384 podStartE2EDuration="9.498343292s" podCreationTimestamp="2025-12-15 07:07:25 +0000 UTC" firstStartedPulling="2025-12-15 07:07:25.983321794 +0000 UTC m=+971.554464705" lastFinishedPulling="2025-12-15 07:07:34.259546692 +0000 UTC m=+979.830689613" observedRunningTime="2025-12-15 07:07:34.491172949 +0000 UTC m=+980.062315860" watchObservedRunningTime="2025-12-15 07:07:34.498343292 +0000 UTC m=+980.069486203" Dec 15 07:07:35 crc kubenswrapper[4876]: I1215 07:07:35.459554 4876 generic.go:334] "Generic (PLEG): container finished" podID="aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b" containerID="cfc6dc6a30e9fcc526687411e905bef6fa59bfb6123469749f5ad75565df1cbf" exitCode=0 Dec 15 07:07:35 crc kubenswrapper[4876]: I1215 07:07:35.459596 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerDied","Data":"cfc6dc6a30e9fcc526687411e905bef6fa59bfb6123469749f5ad75565df1cbf"} Dec 15 07:07:36 crc kubenswrapper[4876]: I1215 07:07:36.467955 4876 generic.go:334] "Generic (PLEG): container finished" podID="aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b" containerID="187169ad5b3d0c52ac7adf156c463124c3f5544217da5d0cb76b4248322bf94f" exitCode=0 Dec 15 07:07:36 crc kubenswrapper[4876]: I1215 07:07:36.468160 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerDied","Data":"187169ad5b3d0c52ac7adf156c463124c3f5544217da5d0cb76b4248322bf94f"} Dec 15 07:07:37 crc kubenswrapper[4876]: I1215 07:07:37.219393 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-c5kqb" Dec 15 07:07:37 crc kubenswrapper[4876]: I1215 07:07:37.482706 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"4c64094ba00281c0468a287a2aef92f547e91cf02124e18febb1581ce559f42e"} Dec 15 07:07:37 crc kubenswrapper[4876]: I1215 07:07:37.482755 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"be241ecabb25344fbbe9f7a3023ace556054ffe8ec1614ed07615dba1c087d7d"} Dec 15 07:07:37 crc kubenswrapper[4876]: I1215 07:07:37.482769 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"5ff64c2ecc99fb59412d6247824965a300b164900fc5617566a207e15823c4da"} Dec 15 07:07:37 crc kubenswrapper[4876]: 
I1215 07:07:37.482781 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"f1120bbac275310e3b8b2c5526be65c44b25928944376ad6c3892b1cb60ae041"} Dec 15 07:07:37 crc kubenswrapper[4876]: I1215 07:07:37.482790 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"8f55eea622f29b59522e15a6a44c4187a83de49bd6efca09b31308f617a38743"} Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.457840 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm"] Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.459036 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.471120 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm"] Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.471964 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.493396 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-hmmjw" event={"ID":"aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b","Type":"ContainerStarted","Data":"ea5c0a60e1aaecc70f2955b49b6aa70515b1c7f94a2ab94faa9484f238887c75"} Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.493554 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.512964 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-hmmjw" podStartSLOduration=5.277958032 podStartE2EDuration="13.512948825s" podCreationTimestamp="2025-12-15 07:07:25 +0000 UTC" firstStartedPulling="2025-12-15 07:07:26.032425623 +0000 UTC m=+971.603568544" lastFinishedPulling="2025-12-15 07:07:34.267416396 +0000 UTC m=+979.838559337" observedRunningTime="2025-12-15 07:07:38.511672129 +0000 UTC m=+984.082815050" watchObservedRunningTime="2025-12-15 07:07:38.512948825 +0000 UTC m=+984.084091736" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.559411 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.559486 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxp4q\" (UniqueName: \"kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.559549 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.660519 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxp4q\" (UniqueName: \"kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.660631 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.660704 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.661276 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.661413 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.682613 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxp4q\" (UniqueName: \"kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:38 crc kubenswrapper[4876]: I1215 07:07:38.772697 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:39 crc kubenswrapper[4876]: I1215 07:07:39.175682 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm"] Dec 15 07:07:39 crc kubenswrapper[4876]: I1215 07:07:39.499738 4876 generic.go:334] "Generic (PLEG): container finished" podID="45a7dec0-345b-4689-b7b6-9d429648f610" containerID="521eb914559f4b0291f6dda0acea7a1a768e905d5c22b128fd22dd50f0c96b39" exitCode=0 Dec 15 07:07:39 crc kubenswrapper[4876]: I1215 07:07:39.499818 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" event={"ID":"45a7dec0-345b-4689-b7b6-9d429648f610","Type":"ContainerDied","Data":"521eb914559f4b0291f6dda0acea7a1a768e905d5c22b128fd22dd50f0c96b39"} Dec 15 07:07:39 crc kubenswrapper[4876]: I1215 07:07:39.499882 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" event={"ID":"45a7dec0-345b-4689-b7b6-9d429648f610","Type":"ContainerStarted","Data":"5f4bf6c8a954fed06a12768114ccf6e0700ec6c064a73b4f1a5ca425f6bb21c6"} Dec 15 07:07:40 crc kubenswrapper[4876]: I1215 07:07:40.642626 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:40 crc kubenswrapper[4876]: I1215 07:07:40.677484 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:07:42 crc kubenswrapper[4876]: I1215 07:07:42.526351 4876 generic.go:334] "Generic (PLEG): container finished" podID="45a7dec0-345b-4689-b7b6-9d429648f610" containerID="ab7c28033a1269f1a375182db4dbc82dafd7ce4559ed0e664a0e447f88992833" exitCode=0 Dec 15 07:07:42 crc kubenswrapper[4876]: I1215 07:07:42.526466 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" event={"ID":"45a7dec0-345b-4689-b7b6-9d429648f610","Type":"ContainerDied","Data":"ab7c28033a1269f1a375182db4dbc82dafd7ce4559ed0e664a0e447f88992833"} Dec 15 07:07:43 crc kubenswrapper[4876]: I1215 07:07:43.534915 4876 generic.go:334] "Generic (PLEG): container finished" podID="45a7dec0-345b-4689-b7b6-9d429648f610" containerID="6632518b4eb123c57bd4c4dceed0a5faf909fba6971aa07f81b80ef43d1381e6" exitCode=0 Dec 15 07:07:43 crc kubenswrapper[4876]: I1215 07:07:43.535005 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" event={"ID":"45a7dec0-345b-4689-b7b6-9d429648f610","Type":"ContainerDied","Data":"6632518b4eb123c57bd4c4dceed0a5faf909fba6971aa07f81b80ef43d1381e6"} Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.786601 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.943142 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle\") pod \"45a7dec0-345b-4689-b7b6-9d429648f610\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.943211 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util\") pod \"45a7dec0-345b-4689-b7b6-9d429648f610\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.943263 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxp4q\" (UniqueName: \"kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q\") pod \"45a7dec0-345b-4689-b7b6-9d429648f610\" (UID: \"45a7dec0-345b-4689-b7b6-9d429648f610\") " Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.944525 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle" (OuterVolumeSpecName: "bundle") pod "45a7dec0-345b-4689-b7b6-9d429648f610" (UID: "45a7dec0-345b-4689-b7b6-9d429648f610"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.944808 4876 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.948507 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q" (OuterVolumeSpecName: "kube-api-access-kxp4q") pod "45a7dec0-345b-4689-b7b6-9d429648f610" (UID: "45a7dec0-345b-4689-b7b6-9d429648f610"). InnerVolumeSpecName "kube-api-access-kxp4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:07:44 crc kubenswrapper[4876]: I1215 07:07:44.954142 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util" (OuterVolumeSpecName: "util") pod "45a7dec0-345b-4689-b7b6-9d429648f610" (UID: "45a7dec0-345b-4689-b7b6-9d429648f610"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.045720 4876 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45a7dec0-345b-4689-b7b6-9d429648f610-util\") on node \"crc\" DevicePath \"\"" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.045784 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxp4q\" (UniqueName: \"kubernetes.io/projected/45a7dec0-345b-4689-b7b6-9d429648f610-kube-api-access-kxp4q\") on node \"crc\" DevicePath \"\"" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.552310 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" event={"ID":"45a7dec0-345b-4689-b7b6-9d429648f610","Type":"ContainerDied","Data":"5f4bf6c8a954fed06a12768114ccf6e0700ec6c064a73b4f1a5ca425f6bb21c6"} Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.552376 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f4bf6c8a954fed06a12768114ccf6e0700ec6c064a73b4f1a5ca425f6bb21c6" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.552382 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.659187 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-rjzg2" Dec 15 07:07:45 crc kubenswrapper[4876]: I1215 07:07:45.730897 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-9r84l" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.366815 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9"] Dec 15 07:07:51 crc kubenswrapper[4876]: E1215 07:07:51.367700 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="pull" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.367717 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="pull" Dec 15 07:07:51 crc kubenswrapper[4876]: E1215 07:07:51.367728 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="extract" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.367735 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="extract" Dec 15 07:07:51 crc kubenswrapper[4876]: E1215 07:07:51.367750 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="util" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.367759 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="util" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.367902 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="45a7dec0-345b-4689-b7b6-9d429648f610" containerName="extract" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.368455 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.372154 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.372404 4876 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-c82cl" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.372510 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.379406 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9"] Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.425903 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gjgd\" (UniqueName: \"kubernetes.io/projected/0640793b-9e6a-4e82-89e4-eaabcdcd0584-kube-api-access-9gjgd\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.426018 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0640793b-9e6a-4e82-89e4-eaabcdcd0584-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.526918 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0640793b-9e6a-4e82-89e4-eaabcdcd0584-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.527041 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gjgd\" (UniqueName: \"kubernetes.io/projected/0640793b-9e6a-4e82-89e4-eaabcdcd0584-kube-api-access-9gjgd\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.527436 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0640793b-9e6a-4e82-89e4-eaabcdcd0584-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.546027 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gjgd\" (UniqueName: \"kubernetes.io/projected/0640793b-9e6a-4e82-89e4-eaabcdcd0584-kube-api-access-9gjgd\") pod \"cert-manager-operator-controller-manager-64cf6dff88-92cs9\" (UID: \"0640793b-9e6a-4e82-89e4-eaabcdcd0584\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.686361 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" Dec 15 07:07:51 crc kubenswrapper[4876]: I1215 07:07:51.891577 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9"] Dec 15 07:07:51 crc kubenswrapper[4876]: W1215 07:07:51.900722 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0640793b_9e6a_4e82_89e4_eaabcdcd0584.slice/crio-8ff5fd79b2a53093e9905deb5f5a2b669b44c20a6dcd234e25453b671891e608 WatchSource:0}: Error finding container 8ff5fd79b2a53093e9905deb5f5a2b669b44c20a6dcd234e25453b671891e608: Status 404 returned error can't find the container with id 8ff5fd79b2a53093e9905deb5f5a2b669b44c20a6dcd234e25453b671891e608 Dec 15 07:07:52 crc kubenswrapper[4876]: I1215 07:07:52.591253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" event={"ID":"0640793b-9e6a-4e82-89e4-eaabcdcd0584","Type":"ContainerStarted","Data":"8ff5fd79b2a53093e9905deb5f5a2b669b44c20a6dcd234e25453b671891e608"} Dec 15 07:07:55 crc kubenswrapper[4876]: I1215 07:07:55.650386 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-hmmjw" Dec 15 07:08:01 crc kubenswrapper[4876]: I1215 07:08:01.645698 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" event={"ID":"0640793b-9e6a-4e82-89e4-eaabcdcd0584","Type":"ContainerStarted","Data":"7bb896b3daf3bd80f57534a69a79d553341c5287f56c72824503f8bc7fc71e81"} Dec 15 07:08:01 crc kubenswrapper[4876]: I1215 07:08:01.672638 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-92cs9" podStartSLOduration=1.914864702 podStartE2EDuration="10.672604701s" podCreationTimestamp="2025-12-15 07:07:51 +0000 UTC" firstStartedPulling="2025-12-15 07:07:51.903174427 +0000 UTC m=+997.474317338" lastFinishedPulling="2025-12-15 07:08:00.660914426 +0000 UTC m=+1006.232057337" observedRunningTime="2025-12-15 07:08:01.667404733 +0000 UTC m=+1007.238547674" watchObservedRunningTime="2025-12-15 07:08:01.672604701 +0000 UTC m=+1007.243747612" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.952898 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8"] Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.954874 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.959192 4876 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-jn9pp" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.959635 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.959778 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.974683 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8"] Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.977365 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:07 crc kubenswrapper[4876]: I1215 07:08:07.977426 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8gxv\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-kube-api-access-q8gxv\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:08 crc kubenswrapper[4876]: I1215 07:08:08.079006 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:08 crc kubenswrapper[4876]: I1215 07:08:08.079094 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8gxv\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-kube-api-access-q8gxv\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:08 crc kubenswrapper[4876]: I1215 07:08:08.105857 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8gxv\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-kube-api-access-q8gxv\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:08 crc kubenswrapper[4876]: I1215 07:08:08.109066 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0800dd6f-a26c-46bd-86a0-f2280aee8f0e-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-qtlm8\" (UID: \"0800dd6f-a26c-46bd-86a0-f2280aee8f0e\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:08 crc kubenswrapper[4876]: I1215 07:08:08.279411 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" Dec 15 07:08:09 crc kubenswrapper[4876]: I1215 07:08:09.139443 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8"] Dec 15 07:08:09 crc kubenswrapper[4876]: I1215 07:08:09.692344 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" event={"ID":"0800dd6f-a26c-46bd-86a0-f2280aee8f0e","Type":"ContainerStarted","Data":"4c18cf6ac318c8a97a1a0fe7730f6a227817914e62d56567821fca976269df77"} Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.666707 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-v4w4z"] Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.667971 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.669779 4876 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-knrks" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.682335 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-v4w4z"] Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.718798 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.718884 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dw6m\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-kube-api-access-7dw6m\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.819792 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dw6m\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-kube-api-access-7dw6m\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.819903 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.848176 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dw6m\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-kube-api-access-7dw6m\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:10 crc kubenswrapper[4876]: I1215 07:08:10.848187 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/593d5e06-4f72-483e-ad51-5d2ac67055b3-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-v4w4z\" (UID: \"593d5e06-4f72-483e-ad51-5d2ac67055b3\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:11 crc kubenswrapper[4876]: I1215 07:08:11.018908 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:11 crc kubenswrapper[4876]: I1215 07:08:11.462639 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-v4w4z"] Dec 15 07:08:11 crc kubenswrapper[4876]: W1215 07:08:11.472073 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod593d5e06_4f72_483e_ad51_5d2ac67055b3.slice/crio-fabea1ffd8b89b006b3d4cc218dfec85cbe61228a7d60e065a62a0ef60687d9d WatchSource:0}: Error finding container fabea1ffd8b89b006b3d4cc218dfec85cbe61228a7d60e065a62a0ef60687d9d: Status 404 returned error can't find the container with id fabea1ffd8b89b006b3d4cc218dfec85cbe61228a7d60e065a62a0ef60687d9d Dec 15 07:08:11 crc kubenswrapper[4876]: I1215 07:08:11.712939 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" event={"ID":"593d5e06-4f72-483e-ad51-5d2ac67055b3","Type":"ContainerStarted","Data":"fabea1ffd8b89b006b3d4cc218dfec85cbe61228a7d60e065a62a0ef60687d9d"} Dec 15 07:08:16 crc kubenswrapper[4876]: I1215 07:08:16.742142 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" event={"ID":"593d5e06-4f72-483e-ad51-5d2ac67055b3","Type":"ContainerStarted","Data":"195e0c33ec17e927efa49414595995109146a2b25ef21d6a37e4b853ef130bbf"} Dec 15 07:08:16 crc kubenswrapper[4876]: I1215 07:08:16.742773 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:16 crc kubenswrapper[4876]: I1215 07:08:16.743886 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" event={"ID":"0800dd6f-a26c-46bd-86a0-f2280aee8f0e","Type":"ContainerStarted","Data":"8d16afeff3cac41088ccc0cf6668e69eb3305d9e7582f374bd3f10ce925f278b"} Dec 15 07:08:16 crc kubenswrapper[4876]: I1215 07:08:16.778839 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-qtlm8" podStartSLOduration=3.009160675 podStartE2EDuration="9.778817201s" podCreationTimestamp="2025-12-15 07:08:07 +0000 UTC" firstStartedPulling="2025-12-15 07:08:09.142546426 +0000 UTC m=+1014.713689337" lastFinishedPulling="2025-12-15 07:08:15.912202942 +0000 UTC m=+1021.483345863" observedRunningTime="2025-12-15 07:08:16.776496155 +0000 UTC m=+1022.347639066" watchObservedRunningTime="2025-12-15 07:08:16.778817201 +0000 UTC m=+1022.349960132" Dec 15 07:08:16 crc kubenswrapper[4876]: I1215 07:08:16.779654 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" podStartSLOduration=2.375275439 podStartE2EDuration="6.779642935s" podCreationTimestamp="2025-12-15 07:08:10 +0000 UTC" firstStartedPulling="2025-12-15 07:08:11.475192868 +0000 UTC m=+1017.046335779" lastFinishedPulling="2025-12-15 07:08:15.879560344 +0000 UTC m=+1021.450703275" observedRunningTime="2025-12-15 07:08:16.761259312 +0000 UTC 
m=+1022.332402233" watchObservedRunningTime="2025-12-15 07:08:16.779642935 +0000 UTC m=+1022.350785856" Dec 15 07:08:21 crc kubenswrapper[4876]: I1215 07:08:21.021853 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-v4w4z" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.183252 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-dlnr8"] Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.184287 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.187920 4876 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-4295d" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.205231 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-dlnr8"] Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.229719 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9vt4\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-kube-api-access-r9vt4\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.229768 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-bound-sa-token\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.331224 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9vt4\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-kube-api-access-r9vt4\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.331263 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-bound-sa-token\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.349558 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9vt4\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-kube-api-access-r9vt4\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.350194 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9476c47-ae65-4abd-a748-87dddd55797d-bound-sa-token\") pod \"cert-manager-86cb77c54b-dlnr8\" (UID: \"e9476c47-ae65-4abd-a748-87dddd55797d\") " pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:24 crc kubenswrapper[4876]: I1215 07:08:24.513822 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-dlnr8" Dec 15 07:08:25 crc kubenswrapper[4876]: I1215 07:08:25.047062 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-dlnr8"] Dec 15 07:08:25 crc kubenswrapper[4876]: I1215 07:08:25.808420 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-dlnr8" event={"ID":"e9476c47-ae65-4abd-a748-87dddd55797d","Type":"ContainerStarted","Data":"5e1aba8244f643b7ff8af96ddcccabb5172891430bb1d6608c5edc31be9eda39"} Dec 15 07:08:25 crc kubenswrapper[4876]: I1215 07:08:25.808462 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-dlnr8" event={"ID":"e9476c47-ae65-4abd-a748-87dddd55797d","Type":"ContainerStarted","Data":"cb62e835c270bf897da1f01b7d6faf7a764966133650148b957e78cf8c7967d6"} Dec 15 07:08:25 crc kubenswrapper[4876]: I1215 07:08:25.827644 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-dlnr8" podStartSLOduration=1.827624357 podStartE2EDuration="1.827624357s" podCreationTimestamp="2025-12-15 07:08:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:08:25.823852619 +0000 UTC m=+1031.394995560" watchObservedRunningTime="2025-12-15 07:08:25.827624357 +0000 UTC m=+1031.398767278" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.146591 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.148330 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.151195 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-lcp68" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.151713 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.151813 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.173490 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.273535 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rsnn\" (UniqueName: \"kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn\") pod \"openstack-operator-index-fpswf\" (UID: \"477f321e-ca62-46b6-a53d-a47265e44c73\") " pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.374794 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rsnn\" (UniqueName: \"kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn\") pod \"openstack-operator-index-fpswf\" (UID: \"477f321e-ca62-46b6-a53d-a47265e44c73\") " pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.392345 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6rsnn\" (UniqueName: \"kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn\") pod \"openstack-operator-index-fpswf\" (UID: \"477f321e-ca62-46b6-a53d-a47265e44c73\") " pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.482273 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:34 crc kubenswrapper[4876]: I1215 07:08:34.880611 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:35 crc kubenswrapper[4876]: I1215 07:08:35.891371 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fpswf" event={"ID":"477f321e-ca62-46b6-a53d-a47265e44c73","Type":"ContainerStarted","Data":"46449cdccba71f3bbb11da6542c06bb84dd4adf8725d9a6622c07fce28265e89"} Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.316998 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.723494 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-gr7bv"] Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.725587 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.743454 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gr7bv"] Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.806561 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7phc\" (UniqueName: \"kubernetes.io/projected/65ad3502-fd7b-40e9-9d7c-8512f175e63e-kube-api-access-r7phc\") pod \"openstack-operator-index-gr7bv\" (UID: \"65ad3502-fd7b-40e9-9d7c-8512f175e63e\") " pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.907574 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7phc\" (UniqueName: \"kubernetes.io/projected/65ad3502-fd7b-40e9-9d7c-8512f175e63e-kube-api-access-r7phc\") pod \"openstack-operator-index-gr7bv\" (UID: \"65ad3502-fd7b-40e9-9d7c-8512f175e63e\") " pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:36 crc kubenswrapper[4876]: I1215 07:08:36.934401 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7phc\" (UniqueName: \"kubernetes.io/projected/65ad3502-fd7b-40e9-9d7c-8512f175e63e-kube-api-access-r7phc\") pod \"openstack-operator-index-gr7bv\" (UID: \"65ad3502-fd7b-40e9-9d7c-8512f175e63e\") " pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.062375 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.272024 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gr7bv"] Dec 15 07:08:37 crc kubenswrapper[4876]: W1215 07:08:37.300644 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65ad3502_fd7b_40e9_9d7c_8512f175e63e.slice/crio-fef39f9ccd7899a2949ff708a54ec178ffd52686df8f77657b4e0e5c96c82c93 WatchSource:0}: Error finding container fef39f9ccd7899a2949ff708a54ec178ffd52686df8f77657b4e0e5c96c82c93: Status 404 returned error can't find the container with id fef39f9ccd7899a2949ff708a54ec178ffd52686df8f77657b4e0e5c96c82c93 Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.915891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gr7bv" event={"ID":"65ad3502-fd7b-40e9-9d7c-8512f175e63e","Type":"ContainerStarted","Data":"828fccebc85ad8d4c3333248be9a72657e2829ba033d632bd331bffae04d9cff"} Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.916226 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gr7bv" event={"ID":"65ad3502-fd7b-40e9-9d7c-8512f175e63e","Type":"ContainerStarted","Data":"fef39f9ccd7899a2949ff708a54ec178ffd52686df8f77657b4e0e5c96c82c93"} Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.917112 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fpswf" event={"ID":"477f321e-ca62-46b6-a53d-a47265e44c73","Type":"ContainerStarted","Data":"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b"} Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.917228 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-fpswf" podUID="477f321e-ca62-46b6-a53d-a47265e44c73" containerName="registry-server" containerID="cri-o://e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b" gracePeriod=2 Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.931822 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-gr7bv" podStartSLOduration=1.519200517 podStartE2EDuration="1.931796805s" podCreationTimestamp="2025-12-15 07:08:36 +0000 UTC" firstStartedPulling="2025-12-15 07:08:37.30324591 +0000 UTC m=+1042.874388821" lastFinishedPulling="2025-12-15 07:08:37.715842208 +0000 UTC m=+1043.286985109" observedRunningTime="2025-12-15 07:08:37.927989529 +0000 UTC m=+1043.499132450" watchObservedRunningTime="2025-12-15 07:08:37.931796805 +0000 UTC m=+1043.502939736" Dec 15 07:08:37 crc kubenswrapper[4876]: I1215 07:08:37.949836 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-fpswf" podStartSLOduration=1.4938433340000001 podStartE2EDuration="3.949816536s" podCreationTimestamp="2025-12-15 07:08:34 +0000 UTC" firstStartedPulling="2025-12-15 07:08:34.885560221 +0000 UTC m=+1040.456703132" lastFinishedPulling="2025-12-15 07:08:37.341533423 +0000 UTC m=+1042.912676334" observedRunningTime="2025-12-15 07:08:37.945020572 +0000 UTC m=+1043.516163483" watchObservedRunningTime="2025-12-15 07:08:37.949816536 +0000 UTC m=+1043.520959447" Dec 15 07:08:38 crc kubenswrapper[4876]: E1215 07:08:38.023711 4876 cadvisor_stats_provider.go:516] "Partial failure 
issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod477f321e_ca62_46b6_a53d_a47265e44c73.slice/crio-e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b.scope\": RecentStats: unable to find data in memory cache]" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.292073 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.428369 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rsnn\" (UniqueName: \"kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn\") pod \"477f321e-ca62-46b6-a53d-a47265e44c73\" (UID: \"477f321e-ca62-46b6-a53d-a47265e44c73\") " Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.434317 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn" (OuterVolumeSpecName: "kube-api-access-6rsnn") pod "477f321e-ca62-46b6-a53d-a47265e44c73" (UID: "477f321e-ca62-46b6-a53d-a47265e44c73"). InnerVolumeSpecName "kube-api-access-6rsnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.529662 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rsnn\" (UniqueName: \"kubernetes.io/projected/477f321e-ca62-46b6-a53d-a47265e44c73-kube-api-access-6rsnn\") on node \"crc\" DevicePath \"\"" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.943577 4876 generic.go:334] "Generic (PLEG): container finished" podID="477f321e-ca62-46b6-a53d-a47265e44c73" containerID="e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b" exitCode=0 Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.943630 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fpswf" event={"ID":"477f321e-ca62-46b6-a53d-a47265e44c73","Type":"ContainerDied","Data":"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b"} Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.943684 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fpswf" event={"ID":"477f321e-ca62-46b6-a53d-a47265e44c73","Type":"ContainerDied","Data":"46449cdccba71f3bbb11da6542c06bb84dd4adf8725d9a6622c07fce28265e89"} Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.943680 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-fpswf" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.943726 4876 scope.go:117] "RemoveContainer" containerID="e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.965262 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.966998 4876 scope.go:117] "RemoveContainer" containerID="e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b" Dec 15 07:08:38 crc kubenswrapper[4876]: E1215 07:08:38.967481 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b\": container with ID starting with e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b not found: ID does not exist" containerID="e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.967523 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b"} err="failed to get container status \"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b\": rpc error: code = NotFound desc = could not find container \"e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b\": container with ID starting with e31c2b25dfa11cd73a5da1154f97dca576ffccc2c6dfdf777c6f9b0f6f7e969b not found: ID does not exist" Dec 15 07:08:38 crc kubenswrapper[4876]: I1215 07:08:38.968869 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-fpswf"] Dec 15 07:08:40 crc kubenswrapper[4876]: I1215 07:08:40.714160 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477f321e-ca62-46b6-a53d-a47265e44c73" path="/var/lib/kubelet/pods/477f321e-ca62-46b6-a53d-a47265e44c73/volumes" Dec 15 07:08:47 crc kubenswrapper[4876]: I1215 07:08:47.063630 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:47 crc kubenswrapper[4876]: I1215 07:08:47.064173 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:47 crc kubenswrapper[4876]: I1215 07:08:47.112736 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:08:48 crc kubenswrapper[4876]: I1215 07:08:48.031960 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-gr7bv" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.365073 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b"] Dec 15 07:09:00 crc kubenswrapper[4876]: E1215 07:09:00.366367 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477f321e-ca62-46b6-a53d-a47265e44c73" containerName="registry-server" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.366385 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="477f321e-ca62-46b6-a53d-a47265e44c73" containerName="registry-server" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.366529 4876 
memory_manager.go:354] "RemoveStaleState removing state" podUID="477f321e-ca62-46b6-a53d-a47265e44c73" containerName="registry-server" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.367795 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.370939 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-jdvxv" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.376478 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b"] Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.533636 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.533702 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.533832 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xg7h\" (UniqueName: \"kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.634587 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.634633 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.634737 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xg7h\" (UniqueName: \"kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " 
pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.635155 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.635285 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.656703 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xg7h\" (UniqueName: \"kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h\") pod \"ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:00 crc kubenswrapper[4876]: I1215 07:09:00.684284 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:01 crc kubenswrapper[4876]: I1215 07:09:01.209632 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b"] Dec 15 07:09:02 crc kubenswrapper[4876]: I1215 07:09:02.109082 4876 generic.go:334] "Generic (PLEG): container finished" podID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerID="6e73ddb39883ca039b08f93699c68e7bbcd1ce81d00b55dad99a7548cbfbd0f1" exitCode=0 Dec 15 07:09:02 crc kubenswrapper[4876]: I1215 07:09:02.109215 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" event={"ID":"b41a0796-453e-4d66-81c7-5d7c44def86a","Type":"ContainerDied","Data":"6e73ddb39883ca039b08f93699c68e7bbcd1ce81d00b55dad99a7548cbfbd0f1"} Dec 15 07:09:02 crc kubenswrapper[4876]: I1215 07:09:02.109505 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" event={"ID":"b41a0796-453e-4d66-81c7-5d7c44def86a","Type":"ContainerStarted","Data":"1141ffa72076debf5fd7eebbc6e0da87f1ee6e95ccb4231b060caa05534d55fa"} Dec 15 07:09:03 crc kubenswrapper[4876]: I1215 07:09:03.117828 4876 generic.go:334] "Generic (PLEG): container finished" podID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerID="46474c7414f5f107c684027ddf1bcc1c83e5598116e5eb3350fdbba347f5338c" exitCode=0 Dec 15 07:09:03 crc kubenswrapper[4876]: I1215 07:09:03.118013 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" event={"ID":"b41a0796-453e-4d66-81c7-5d7c44def86a","Type":"ContainerDied","Data":"46474c7414f5f107c684027ddf1bcc1c83e5598116e5eb3350fdbba347f5338c"} Dec 15 07:09:04 crc kubenswrapper[4876]: I1215 07:09:04.125860 4876 
generic.go:334] "Generic (PLEG): container finished" podID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerID="354e9027d319d0f57b948de887788ef646c1723e6a0826e8098abb8d0747c41e" exitCode=0 Dec 15 07:09:04 crc kubenswrapper[4876]: I1215 07:09:04.125908 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" event={"ID":"b41a0796-453e-4d66-81c7-5d7c44def86a","Type":"ContainerDied","Data":"354e9027d319d0f57b948de887788ef646c1723e6a0826e8098abb8d0747c41e"} Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.391717 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.495317 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util\") pod \"b41a0796-453e-4d66-81c7-5d7c44def86a\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.495385 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle\") pod \"b41a0796-453e-4d66-81c7-5d7c44def86a\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.495421 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xg7h\" (UniqueName: \"kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h\") pod \"b41a0796-453e-4d66-81c7-5d7c44def86a\" (UID: \"b41a0796-453e-4d66-81c7-5d7c44def86a\") " Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.496083 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle" (OuterVolumeSpecName: "bundle") pod "b41a0796-453e-4d66-81c7-5d7c44def86a" (UID: "b41a0796-453e-4d66-81c7-5d7c44def86a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.501211 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h" (OuterVolumeSpecName: "kube-api-access-2xg7h") pod "b41a0796-453e-4d66-81c7-5d7c44def86a" (UID: "b41a0796-453e-4d66-81c7-5d7c44def86a"). InnerVolumeSpecName "kube-api-access-2xg7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.510638 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util" (OuterVolumeSpecName: "util") pod "b41a0796-453e-4d66-81c7-5d7c44def86a" (UID: "b41a0796-453e-4d66-81c7-5d7c44def86a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.597198 4876 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-util\") on node \"crc\" DevicePath \"\"" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.597228 4876 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b41a0796-453e-4d66-81c7-5d7c44def86a-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:09:05 crc kubenswrapper[4876]: I1215 07:09:05.597238 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xg7h\" (UniqueName: \"kubernetes.io/projected/b41a0796-453e-4d66-81c7-5d7c44def86a-kube-api-access-2xg7h\") on node \"crc\" DevicePath \"\"" Dec 15 07:09:06 crc kubenswrapper[4876]: I1215 07:09:06.142664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" event={"ID":"b41a0796-453e-4d66-81c7-5d7c44def86a","Type":"ContainerDied","Data":"1141ffa72076debf5fd7eebbc6e0da87f1ee6e95ccb4231b060caa05534d55fa"} Dec 15 07:09:06 crc kubenswrapper[4876]: I1215 07:09:06.142924 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1141ffa72076debf5fd7eebbc6e0da87f1ee6e95ccb4231b060caa05534d55fa" Dec 15 07:09:06 crc kubenswrapper[4876]: I1215 07:09:06.142722 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.447137 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s"] Dec 15 07:09:09 crc kubenswrapper[4876]: E1215 07:09:09.447726 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="extract" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.447746 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="extract" Dec 15 07:09:09 crc kubenswrapper[4876]: E1215 07:09:09.447765 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="util" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.447774 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="util" Dec 15 07:09:09 crc kubenswrapper[4876]: E1215 07:09:09.447794 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="pull" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.447801 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="pull" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.447931 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b41a0796-453e-4d66-81c7-5d7c44def86a" containerName="extract" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.448473 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.466746 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s"] Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.468212 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-bvnqh" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.566136 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkxq9\" (UniqueName: \"kubernetes.io/projected/b53f15be-f457-4006-9bc5-ad33291b58d5-kube-api-access-nkxq9\") pod \"openstack-operator-controller-operator-57bbbf4567-v9z5s\" (UID: \"b53f15be-f457-4006-9bc5-ad33291b58d5\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.667492 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkxq9\" (UniqueName: \"kubernetes.io/projected/b53f15be-f457-4006-9bc5-ad33291b58d5-kube-api-access-nkxq9\") pod \"openstack-operator-controller-operator-57bbbf4567-v9z5s\" (UID: \"b53f15be-f457-4006-9bc5-ad33291b58d5\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.688020 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkxq9\" (UniqueName: \"kubernetes.io/projected/b53f15be-f457-4006-9bc5-ad33291b58d5-kube-api-access-nkxq9\") pod \"openstack-operator-controller-operator-57bbbf4567-v9z5s\" (UID: \"b53f15be-f457-4006-9bc5-ad33291b58d5\") " pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:09 crc kubenswrapper[4876]: I1215 07:09:09.772162 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:10 crc kubenswrapper[4876]: I1215 07:09:10.097613 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s"] Dec 15 07:09:10 crc kubenswrapper[4876]: I1215 07:09:10.099176 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:09:10 crc kubenswrapper[4876]: I1215 07:09:10.166407 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" event={"ID":"b53f15be-f457-4006-9bc5-ad33291b58d5","Type":"ContainerStarted","Data":"54b6024411ca45611cffa16914fade5e3455e2563b0192b2b2e468d4c90c92e3"} Dec 15 07:09:17 crc kubenswrapper[4876]: I1215 07:09:17.352551 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" event={"ID":"b53f15be-f457-4006-9bc5-ad33291b58d5","Type":"ContainerStarted","Data":"55ae22cdbb39117f2e40821976e063e69cbb27696ec3a39829e407a8533d47e2"} Dec 15 07:09:17 crc kubenswrapper[4876]: I1215 07:09:17.353148 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:17 crc kubenswrapper[4876]: I1215 07:09:17.388386 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" podStartSLOduration=1.822698705 podStartE2EDuration="8.388365773s" podCreationTimestamp="2025-12-15 07:09:09 +0000 UTC" firstStartedPulling="2025-12-15 07:09:10.098957757 +0000 UTC m=+1075.670100668" lastFinishedPulling="2025-12-15 07:09:16.664624825 +0000 UTC m=+1082.235767736" observedRunningTime="2025-12-15 07:09:17.383611632 +0000 UTC m=+1082.954754543" watchObservedRunningTime="2025-12-15 07:09:17.388365773 +0000 UTC m=+1082.959508684" Dec 15 07:09:27 crc kubenswrapper[4876]: I1215 07:09:27.322877 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:09:27 crc kubenswrapper[4876]: I1215 07:09:27.323477 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:09:29 crc kubenswrapper[4876]: I1215 07:09:29.774665 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-57bbbf4567-v9z5s" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.550321 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-kdbsr"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.551694 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.556942 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-zwz9m" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.595859 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-kdbsr"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.631196 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.632311 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.636431 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7jq9z" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.651616 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.665686 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.669212 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.674433 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-dkpdl" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.681980 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.682779 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t88z\" (UniqueName: \"kubernetes.io/projected/cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87-kube-api-access-2t88z\") pod \"barbican-operator-controller-manager-95949466-kdbsr\" (UID: \"cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.697055 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.697890 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.700681 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-7hdpd" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.720439 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.720473 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.721234 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.723533 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-g9lwn" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.723611 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.724379 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.736474 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.739779 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-c6mgp" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.739808 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.740502 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.742461 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-bp574" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.742640 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.757189 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.766682 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.785990 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc24v\" (UniqueName: \"kubernetes.io/projected/4b7cafd3-81e2-419f-879e-5761126af781-kube-api-access-rc24v\") pod \"cinder-operator-controller-manager-5cf45c46bd-djwl2\" (UID: \"4b7cafd3-81e2-419f-879e-5761126af781\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.786291 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjb62\" (UniqueName: \"kubernetes.io/projected/c9c3edf2-0545-4a2f-9b53-53317c8c028b-kube-api-access-mjb62\") pod \"heat-operator-controller-manager-59b8dcb766-8k4s2\" (UID: \"c9c3edf2-0545-4a2f-9b53-53317c8c028b\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.786377 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62gjb\" (UniqueName: \"kubernetes.io/projected/7bcbfa3a-5871-44db-b07b-2687a995b87f-kube-api-access-62gjb\") pod \"horizon-operator-controller-manager-6ccf486b9-ndjnl\" (UID: \"7bcbfa3a-5871-44db-b07b-2687a995b87f\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.786496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t88z\" (UniqueName: \"kubernetes.io/projected/cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87-kube-api-access-2t88z\") pod \"barbican-operator-controller-manager-95949466-kdbsr\" (UID: \"cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.786582 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpzlf\" (UniqueName: \"kubernetes.io/projected/6f2624c6-7e76-4413-b586-246d5cab2346-kube-api-access-vpzlf\") pod \"glance-operator-controller-manager-767f9d7567-tmckd\" (UID: \"6f2624c6-7e76-4413-b586-246d5cab2346\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.786697 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw6nt\" (UniqueName: \"kubernetes.io/projected/7bc70792-76af-4d00-a2b2-2fdd6b89be16-kube-api-access-tw6nt\") pod \"designate-operator-controller-manager-66f8b87655-fjmj4\" (UID: \"7bc70792-76af-4d00-a2b2-2fdd6b89be16\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.794310 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.795697 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.801484 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.802775 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.806227 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-999t5" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.806445 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-98pvn" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.826460 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.830311 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.830490 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t88z\" (UniqueName: \"kubernetes.io/projected/cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87-kube-api-access-2t88z\") pod \"barbican-operator-controller-manager-95949466-kdbsr\" (UID: \"cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87\") " pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.831241 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.836960 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.840908 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.842054 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.849882 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-5pnmx" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.850183 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-fjc78" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.876591 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.887170 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.887874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjb62\" (UniqueName: \"kubernetes.io/projected/c9c3edf2-0545-4a2f-9b53-53317c8c028b-kube-api-access-mjb62\") pod \"heat-operator-controller-manager-59b8dcb766-8k4s2\" (UID: \"c9c3edf2-0545-4a2f-9b53-53317c8c028b\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.887976 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kss7k\" (UniqueName: \"kubernetes.io/projected/ff12b926-5daa-4bd3-b49a-154d80442fa8-kube-api-access-kss7k\") pod \"keystone-operator-controller-manager-5c7cbf548f-454ft\" (UID: \"ff12b926-5daa-4bd3-b49a-154d80442fa8\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888057 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888163 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888163 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62gjb\" (UniqueName: \"kubernetes.io/projected/7bcbfa3a-5871-44db-b07b-2687a995b87f-kube-api-access-62gjb\") pod \"horizon-operator-controller-manager-6ccf486b9-ndjnl\" (UID: \"7bcbfa3a-5871-44db-b07b-2687a995b87f\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888438 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r4pf\" (UniqueName: \"kubernetes.io/projected/70e313c3-63de-489b-bf09-6072d26ad862-kube-api-access-6r4pf\") pod \"ironic-operator-controller-manager-f458558d7-pcz5j\" (UID: \"70e313c3-63de-489b-bf09-6072d26ad862\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888478 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpzlf\" (UniqueName: \"kubernetes.io/projected/6f2624c6-7e76-4413-b586-246d5cab2346-kube-api-access-vpzlf\") pod \"glance-operator-controller-manager-767f9d7567-tmckd\" (UID: \"6f2624c6-7e76-4413-b586-246d5cab2346\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888538 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm4z7\" (UniqueName: \"kubernetes.io/projected/f4c838cf-5a9f-445d-b43b-788ced3037cf-kube-api-access-lm4z7\") pod \"manila-operator-controller-manager-5fdd9786f7-b6nw6\" (UID: \"f4c838cf-5a9f-445d-b43b-788ced3037cf\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888569 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw6nt\" (UniqueName: \"kubernetes.io/projected/7bc70792-76af-4d00-a2b2-2fdd6b89be16-kube-api-access-tw6nt\") pod \"designate-operator-controller-manager-66f8b87655-fjmj4\" (UID: \"7bc70792-76af-4d00-a2b2-2fdd6b89be16\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888709 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc24v\" (UniqueName: \"kubernetes.io/projected/4b7cafd3-81e2-419f-879e-5761126af781-kube-api-access-rc24v\") pod \"cinder-operator-controller-manager-5cf45c46bd-djwl2\" (UID: \"4b7cafd3-81e2-419f-879e-5761126af781\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.888743 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqm9r\" (UniqueName: \"kubernetes.io/projected/ab87f935-876b-42b5-9eb9-85092cd8068f-kube-api-access-gqm9r\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.892824 4876 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zt626" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.913194 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.917182 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.925749 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.926558 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw6nt\" (UniqueName: \"kubernetes.io/projected/7bc70792-76af-4d00-a2b2-2fdd6b89be16-kube-api-access-tw6nt\") pod \"designate-operator-controller-manager-66f8b87655-fjmj4\" (UID: \"7bc70792-76af-4d00-a2b2-2fdd6b89be16\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.926813 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.927528 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc24v\" (UniqueName: \"kubernetes.io/projected/4b7cafd3-81e2-419f-879e-5761126af781-kube-api-access-rc24v\") pod \"cinder-operator-controller-manager-5cf45c46bd-djwl2\" (UID: \"4b7cafd3-81e2-419f-879e-5761126af781\") " pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.930210 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpzlf\" (UniqueName: \"kubernetes.io/projected/6f2624c6-7e76-4413-b586-246d5cab2346-kube-api-access-vpzlf\") pod \"glance-operator-controller-manager-767f9d7567-tmckd\" (UID: \"6f2624c6-7e76-4413-b586-246d5cab2346\") " pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.930527 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9bd5q" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.931770 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62gjb\" (UniqueName: \"kubernetes.io/projected/7bcbfa3a-5871-44db-b07b-2687a995b87f-kube-api-access-62gjb\") pod \"horizon-operator-controller-manager-6ccf486b9-ndjnl\" (UID: \"7bcbfa3a-5871-44db-b07b-2687a995b87f\") " pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.932100 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.935637 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjb62\" (UniqueName: \"kubernetes.io/projected/c9c3edf2-0545-4a2f-9b53-53317c8c028b-kube-api-access-mjb62\") pod \"heat-operator-controller-manager-59b8dcb766-8k4s2\" (UID: \"c9c3edf2-0545-4a2f-9b53-53317c8c028b\") " pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.949606 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8"] Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.950609 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.955251 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-j85vv" Dec 15 07:09:54 crc kubenswrapper[4876]: I1215 07:09:54.961641 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.965031 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.971441 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.980640 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.981560 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.983444 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xn4m6" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.983573 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.991407 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsqmf\" (UniqueName: \"kubernetes.io/projected/f589b271-7169-4562-80b9-35f85f4ecfec-kube-api-access-rsqmf\") pod \"neutron-operator-controller-manager-7cd87b778f-v56p5\" (UID: \"f589b271-7169-4562-80b9-35f85f4ecfec\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996175 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqm9r\" (UniqueName: \"kubernetes.io/projected/ab87f935-876b-42b5-9eb9-85092cd8068f-kube-api-access-gqm9r\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996207 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9n8s\" (UniqueName: \"kubernetes.io/projected/cb583ab2-3f9a-42e3-afd3-630dd25eb152-kube-api-access-k9n8s\") pod \"mariadb-operator-controller-manager-f76f4954c-7qzqq\" (UID: \"cb583ab2-3f9a-42e3-afd3-630dd25eb152\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996334 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kss7k\" (UniqueName: \"kubernetes.io/projected/ff12b926-5daa-4bd3-b49a-154d80442fa8-kube-api-access-kss7k\") pod \"keystone-operator-controller-manager-5c7cbf548f-454ft\" (UID: \"ff12b926-5daa-4bd3-b49a-154d80442fa8\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996432 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996467 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r4pf\" (UniqueName: \"kubernetes.io/projected/70e313c3-63de-489b-bf09-6072d26ad862-kube-api-access-6r4pf\") pod \"ironic-operator-controller-manager-f458558d7-pcz5j\" (UID: \"70e313c3-63de-489b-bf09-6072d26ad862\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996577 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm4z7\" (UniqueName: \"kubernetes.io/projected/f4c838cf-5a9f-445d-b43b-788ced3037cf-kube-api-access-lm4z7\") pod \"manila-operator-controller-manager-5fdd9786f7-b6nw6\" (UID: \"f4c838cf-5a9f-445d-b43b-788ced3037cf\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:54.996598 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk4pg\" (UniqueName: \"kubernetes.io/projected/85c28239-78ab-4206-b204-8fa54acf2968-kube-api-access-qk4pg\") pod \"octavia-operator-controller-manager-68c649d9d-2mxzz\" (UID: \"85c28239-78ab-4206-b204-8fa54acf2968\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.052549 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.058453 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.058650 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:09:55.558622146 +0000 UTC m=+1121.129765057 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.062741 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.087767 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.122230 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.127530 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqm9r\" (UniqueName: \"kubernetes.io/projected/ab87f935-876b-42b5-9eb9-85092cd8068f-kube-api-access-gqm9r\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.129153 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r4pf\" (UniqueName: \"kubernetes.io/projected/70e313c3-63de-489b-bf09-6072d26ad862-kube-api-access-6r4pf\") pod \"ironic-operator-controller-manager-f458558d7-pcz5j\" (UID: \"70e313c3-63de-489b-bf09-6072d26ad862\") " pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.131381 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kss7k\" (UniqueName: \"kubernetes.io/projected/ff12b926-5daa-4bd3-b49a-154d80442fa8-kube-api-access-kss7k\") pod \"keystone-operator-controller-manager-5c7cbf548f-454ft\" (UID: \"ff12b926-5daa-4bd3-b49a-154d80442fa8\") " pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155016 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm4z7\" (UniqueName: \"kubernetes.io/projected/f4c838cf-5a9f-445d-b43b-788ced3037cf-kube-api-access-lm4z7\") pod \"manila-operator-controller-manager-5fdd9786f7-b6nw6\" (UID: \"f4c838cf-5a9f-445d-b43b-788ced3037cf\") " pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155606 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9n8s\" (UniqueName: \"kubernetes.io/projected/cb583ab2-3f9a-42e3-afd3-630dd25eb152-kube-api-access-k9n8s\") pod \"mariadb-operator-controller-manager-f76f4954c-7qzqq\" (UID: \"cb583ab2-3f9a-42e3-afd3-630dd25eb152\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155718 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2crx\" (UniqueName: \"kubernetes.io/projected/e7ea8ece-c3d4-46bb-9255-6021533983c4-kube-api-access-x2crx\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155757 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlfmj\" (UniqueName: \"kubernetes.io/projected/a590cf12-4e90-44fe-be6c-b392c7e22208-kube-api-access-wlfmj\") pod \"nova-operator-controller-manager-5fbbf8b6cc-9mmq8\" (UID: \"a590cf12-4e90-44fe-be6c-b392c7e22208\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155804 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk4pg\" (UniqueName: 
\"kubernetes.io/projected/85c28239-78ab-4206-b204-8fa54acf2968-kube-api-access-qk4pg\") pod \"octavia-operator-controller-manager-68c649d9d-2mxzz\" (UID: \"85c28239-78ab-4206-b204-8fa54acf2968\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.155967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsqmf\" (UniqueName: \"kubernetes.io/projected/f589b271-7169-4562-80b9-35f85f4ecfec-kube-api-access-rsqmf\") pod \"neutron-operator-controller-manager-7cd87b778f-v56p5\" (UID: \"f589b271-7169-4562-80b9-35f85f4ecfec\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.159444 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.168846 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.179161 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.179370 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.181785 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-hrzbc" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.187485 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.189330 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.199293 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-dfx2p" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.202496 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk4pg\" (UniqueName: \"kubernetes.io/projected/85c28239-78ab-4206-b204-8fa54acf2968-kube-api-access-qk4pg\") pod \"octavia-operator-controller-manager-68c649d9d-2mxzz\" (UID: \"85c28239-78ab-4206-b204-8fa54acf2968\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.203704 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9n8s\" (UniqueName: \"kubernetes.io/projected/cb583ab2-3f9a-42e3-afd3-630dd25eb152-kube-api-access-k9n8s\") pod \"mariadb-operator-controller-manager-f76f4954c-7qzqq\" (UID: \"cb583ab2-3f9a-42e3-afd3-630dd25eb152\") " pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.203845 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsqmf\" (UniqueName: \"kubernetes.io/projected/f589b271-7169-4562-80b9-35f85f4ecfec-kube-api-access-rsqmf\") pod \"neutron-operator-controller-manager-7cd87b778f-v56p5\" (UID: \"f589b271-7169-4562-80b9-35f85f4ecfec\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.237924 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.252758 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.258162 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.259141 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.261487 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2crx\" (UniqueName: \"kubernetes.io/projected/e7ea8ece-c3d4-46bb-9255-6021533983c4-kube-api-access-x2crx\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.261539 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlfmj\" (UniqueName: \"kubernetes.io/projected/a590cf12-4e90-44fe-be6c-b392c7e22208-kube-api-access-wlfmj\") pod \"nova-operator-controller-manager-5fbbf8b6cc-9mmq8\" (UID: \"a590cf12-4e90-44fe-be6c-b392c7e22208\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.261592 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz7hx\" (UniqueName: \"kubernetes.io/projected/94aa3357-932f-4a07-8c32-32b357938142-kube-api-access-jz7hx\") pod \"ovn-operator-controller-manager-bf6d4f946-dh78v\" (UID: \"94aa3357-932f-4a07-8c32-32b357938142\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.261631 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.261654 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gghb\" (UniqueName: \"kubernetes.io/projected/5a8894cb-9bb5-403e-9003-28e8f771f142-kube-api-access-7gghb\") pod \"placement-operator-controller-manager-8665b56d78-8br7q\" (UID: \"5a8894cb-9bb5-403e-9003-28e8f771f142\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.262714 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.262764 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:55.762751277 +0000 UTC m=+1121.333894188 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.263707 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-t4khg" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.296993 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlfmj\" (UniqueName: \"kubernetes.io/projected/a590cf12-4e90-44fe-be6c-b392c7e22208-kube-api-access-wlfmj\") pod \"nova-operator-controller-manager-5fbbf8b6cc-9mmq8\" (UID: \"a590cf12-4e90-44fe-be6c-b392c7e22208\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.297057 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.301123 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2crx\" (UniqueName: \"kubernetes.io/projected/e7ea8ece-c3d4-46bb-9255-6021533983c4-kube-api-access-x2crx\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.315994 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.316878 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.319269 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-fr7ds" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.341582 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.354398 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.355430 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.362665 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-pgcsl" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.363544 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppm67\" (UniqueName: \"kubernetes.io/projected/f43fd495-2325-4fc5-853a-2289e6398417-kube-api-access-ppm67\") pod \"telemetry-operator-controller-manager-97d456b9-8vn7l\" (UID: \"f43fd495-2325-4fc5-853a-2289e6398417\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.363608 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz7hx\" (UniqueName: \"kubernetes.io/projected/94aa3357-932f-4a07-8c32-32b357938142-kube-api-access-jz7hx\") pod \"ovn-operator-controller-manager-bf6d4f946-dh78v\" (UID: \"94aa3357-932f-4a07-8c32-32b357938142\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.363701 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppxrt\" (UniqueName: \"kubernetes.io/projected/970d703f-35b5-41d2-b2b6-fae4c2fba825-kube-api-access-ppxrt\") pod \"swift-operator-controller-manager-5c6df8f9-cj8nd\" (UID: \"970d703f-35b5-41d2-b2b6-fae4c2fba825\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.363727 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gghb\" (UniqueName: \"kubernetes.io/projected/5a8894cb-9bb5-403e-9003-28e8f771f142-kube-api-access-7gghb\") pod \"placement-operator-controller-manager-8665b56d78-8br7q\" (UID: \"5a8894cb-9bb5-403e-9003-28e8f771f142\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.388159 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.396407 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gghb\" (UniqueName: \"kubernetes.io/projected/5a8894cb-9bb5-403e-9003-28e8f771f142-kube-api-access-7gghb\") pod \"placement-operator-controller-manager-8665b56d78-8br7q\" (UID: \"5a8894cb-9bb5-403e-9003-28e8f771f142\") " pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.399946 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz7hx\" (UniqueName: \"kubernetes.io/projected/94aa3357-932f-4a07-8c32-32b357938142-kube-api-access-jz7hx\") pod \"ovn-operator-controller-manager-bf6d4f946-dh78v\" (UID: \"94aa3357-932f-4a07-8c32-32b357938142\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.405641 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.407215 4876 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.410465 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-mpzbw" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.427952 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.451923 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.464856 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppm67\" (UniqueName: \"kubernetes.io/projected/f43fd495-2325-4fc5-853a-2289e6398417-kube-api-access-ppm67\") pod \"telemetry-operator-controller-manager-97d456b9-8vn7l\" (UID: \"f43fd495-2325-4fc5-853a-2289e6398417\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.464901 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l28v\" (UniqueName: \"kubernetes.io/projected/ce1ac899-0b49-4d41-b5a6-3272ef25023a-kube-api-access-6l28v\") pod \"test-operator-controller-manager-756ccf86c7-htkc7\" (UID: \"ce1ac899-0b49-4d41-b5a6-3272ef25023a\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.464971 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppxrt\" (UniqueName: \"kubernetes.io/projected/970d703f-35b5-41d2-b2b6-fae4c2fba825-kube-api-access-ppxrt\") pod \"swift-operator-controller-manager-5c6df8f9-cj8nd\" (UID: \"970d703f-35b5-41d2-b2b6-fae4c2fba825\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.465017 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdjgj\" (UniqueName: \"kubernetes.io/projected/2af1ad25-d6c3-402a-bbe2-78e0a900a6ca-kube-api-access-wdjgj\") pod \"watcher-operator-controller-manager-55f78b7c4c-vm8xh\" (UID: \"2af1ad25-d6c3-402a-bbe2-78e0a900a6ca\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.472974 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.481473 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.488459 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.488994 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppxrt\" (UniqueName: \"kubernetes.io/projected/970d703f-35b5-41d2-b2b6-fae4c2fba825-kube-api-access-ppxrt\") pod \"swift-operator-controller-manager-5c6df8f9-cj8nd\" (UID: \"970d703f-35b5-41d2-b2b6-fae4c2fba825\") " pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.493775 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppm67\" (UniqueName: \"kubernetes.io/projected/f43fd495-2325-4fc5-853a-2289e6398417-kube-api-access-ppm67\") pod \"telemetry-operator-controller-manager-97d456b9-8vn7l\" (UID: \"f43fd495-2325-4fc5-853a-2289e6398417\") " pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.500475 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.538573 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.540183 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.541153 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.542983 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.549183 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.549584 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fzw6h" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.552405 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.566781 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdjgj\" (UniqueName: \"kubernetes.io/projected/2af1ad25-d6c3-402a-bbe2-78e0a900a6ca-kube-api-access-wdjgj\") pod \"watcher-operator-controller-manager-55f78b7c4c-vm8xh\" (UID: \"2af1ad25-d6c3-402a-bbe2-78e0a900a6ca\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.566829 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.566883 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l28v\" (UniqueName: \"kubernetes.io/projected/ce1ac899-0b49-4d41-b5a6-3272ef25023a-kube-api-access-6l28v\") pod \"test-operator-controller-manager-756ccf86c7-htkc7\" (UID: \"ce1ac899-0b49-4d41-b5a6-3272ef25023a\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.567278 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.567359 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:09:56.56732327 +0000 UTC m=+1122.138466241 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.593002 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.596006 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdjgj\" (UniqueName: \"kubernetes.io/projected/2af1ad25-d6c3-402a-bbe2-78e0a900a6ca-kube-api-access-wdjgj\") pod \"watcher-operator-controller-manager-55f78b7c4c-vm8xh\" (UID: \"2af1ad25-d6c3-402a-bbe2-78e0a900a6ca\") " pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.597490 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l28v\" (UniqueName: \"kubernetes.io/projected/ce1ac899-0b49-4d41-b5a6-3272ef25023a-kube-api-access-6l28v\") pod \"test-operator-controller-manager-756ccf86c7-htkc7\" (UID: \"ce1ac899-0b49-4d41-b5a6-3272ef25023a\") " pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.602520 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.623444 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.624354 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.630238 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-b62pb" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.648646 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.648946 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.677341 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.677412 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.677504 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpc4x\" (UniqueName: \"kubernetes.io/projected/6d4d5d32-5a7d-454e-b86d-2513e278a9d5-kube-api-access-dpc4x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xmt78\" (UID: \"6d4d5d32-5a7d-454e-b86d-2513e278a9d5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.677569 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx99k\" (UniqueName: \"kubernetes.io/projected/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-kube-api-access-wx99k\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.705575 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.770498 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.779359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx99k\" (UniqueName: \"kubernetes.io/projected/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-kube-api-access-wx99k\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.779434 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.779466 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.779492 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.779540 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpc4x\" (UniqueName: \"kubernetes.io/projected/6d4d5d32-5a7d-454e-b86d-2513e278a9d5-kube-api-access-dpc4x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xmt78\" (UID: \"6d4d5d32-5a7d-454e-b86d-2513e278a9d5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.779895 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.779938 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:56.27992524 +0000 UTC m=+1121.851068151 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.779967 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.780055 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:56.780028803 +0000 UTC m=+1122.351171764 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.780133 4876 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: E1215 07:09:55.780167 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:56.280159076 +0000 UTC m=+1121.851301987 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "metrics-server-cert" not found Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.780773 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-95949466-kdbsr"] Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.810491 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpc4x\" (UniqueName: \"kubernetes.io/projected/6d4d5d32-5a7d-454e-b86d-2513e278a9d5-kube-api-access-dpc4x\") pod \"rabbitmq-cluster-operator-manager-668c99d594-xmt78\" (UID: \"6d4d5d32-5a7d-454e-b86d-2513e278a9d5\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.817010 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx99k\" (UniqueName: \"kubernetes.io/projected/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-kube-api-access-wx99k\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:55 crc kubenswrapper[4876]: I1215 07:09:55.824064 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.006864 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.235616 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.307891 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.307939 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.308056 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.308097 4876 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.308177 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:57.308156412 +0000 UTC m=+1122.879299313 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.308195 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:57.308189093 +0000 UTC m=+1122.879332004 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "metrics-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.485126 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.495981 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70e313c3_63de_489b_bf09_6072d26ad862.slice/crio-69a39953632830d213d6ec1904a4c6b8a863751b51caa2bc02e1a805b79a56a8 WatchSource:0}: Error finding container 69a39953632830d213d6ec1904a4c6b8a863751b51caa2bc02e1a805b79a56a8: Status 404 returned error can't find the container with id 69a39953632830d213d6ec1904a4c6b8a863751b51caa2bc02e1a805b79a56a8 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.609460 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.613956 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.614115 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.614172 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:09:58.614155205 +0000 UTC m=+1124.185298116 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.643904 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff12b926_5daa_4bd3_b49a_154d80442fa8.slice/crio-ff838eea5f8672cf05803e4c25a7d906d7885e6f160c256c6a396f8e478f0c25 WatchSource:0}: Error finding container ff838eea5f8672cf05803e4c25a7d906d7885e6f160c256c6a396f8e478f0c25: Status 404 returned error can't find the container with id ff838eea5f8672cf05803e4c25a7d906d7885e6f160c256c6a396f8e478f0c25 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.646766 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.658007 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.658038 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" event={"ID":"4b7cafd3-81e2-419f-879e-5761126af781","Type":"ContainerStarted","Data":"42315c870b13a8bd298523d665ab7a6be28a3ee464cafadf75a44d36d1009a10"} Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.662400 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f2624c6_7e76_4413_b586_246d5cab2346.slice/crio-ac826ee2f6dd0ef9703e5493f16e81770c28f9d87e50b594b50c0cc8bfb8cf28 WatchSource:0}: Error finding container ac826ee2f6dd0ef9703e5493f16e81770c28f9d87e50b594b50c0cc8bfb8cf28: Status 404 returned error can't find the container with id ac826ee2f6dd0ef9703e5493f16e81770c28f9d87e50b594b50c0cc8bfb8cf28 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.662580 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" event={"ID":"7bc70792-76af-4d00-a2b2-2fdd6b89be16","Type":"ContainerStarted","Data":"0381dbf83e7412d8574e4023a65cf9e70f017ec8166ee6c509ce66ecd1c72dcd"} Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.664104 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.664320 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" event={"ID":"70e313c3-63de-489b-bf09-6072d26ad862","Type":"ContainerStarted","Data":"69a39953632830d213d6ec1904a4c6b8a863751b51caa2bc02e1a805b79a56a8"} Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.669035 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" event={"ID":"cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87","Type":"ContainerStarted","Data":"85938913937db2cd322f37bfc640abd0bb255d8fb4039d422e5227bfe62c4c2e"} Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.687477 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2"] Dec 15 
07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.754378 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.756652 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.760641 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf589b271_7169_4562_80b9_35f85f4ecfec.slice/crio-3be64b51baf0bac14fe4c710c1d4a267105d3e637a36c2f16e7b5ebdcfc0622d WatchSource:0}: Error finding container 3be64b51baf0bac14fe4c710c1d4a267105d3e637a36c2f16e7b5ebdcfc0622d: Status 404 returned error can't find the container with id 3be64b51baf0bac14fe4c710c1d4a267105d3e637a36c2f16e7b5ebdcfc0622d Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.761702 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.764115 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2af1ad25_d6c3_402a_bbe2_78e0a900a6ca.slice/crio-552192d1984bab3d501244a83c1e1575f1e957a2d49f11b918066b25d9c55ac3 WatchSource:0}: Error finding container 552192d1984bab3d501244a83c1e1575f1e957a2d49f11b918066b25d9c55ac3: Status 404 returned error can't find the container with id 552192d1984bab3d501244a83c1e1575f1e957a2d49f11b918066b25d9c55ac3 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.769678 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.772700 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a8894cb_9bb5_403e_9003_28e8f771f142.slice/crio-7b8db6629c9e9b3511d28ce97aebc16e1d1643849c0bd1c3e8f727adae1c4dbc WatchSource:0}: Error finding container 7b8db6629c9e9b3511d28ce97aebc16e1d1643849c0bd1c3e8f727adae1c4dbc: Status 404 returned error can't find the container with id 7b8db6629c9e9b3511d28ce97aebc16e1d1643849c0bd1c3e8f727adae1c4dbc Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.776192 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94aa3357_932f_4a07_8c32_32b357938142.slice/crio-798795e34ec8dc00f7333f542a1c0a44c609c381257841ce3d9721a297e7ba26 WatchSource:0}: Error finding container 798795e34ec8dc00f7333f542a1c0a44c609c381257841ce3d9721a297e7ba26: Status 404 returned error can't find the container with id 798795e34ec8dc00f7333f542a1c0a44c609c381257841ce3d9721a297e7ba26 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.776698 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq"] Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.784563 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6l28v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-756ccf86c7-htkc7_openstack-operators(ce1ac899-0b49-4d41-b5a6-3272ef25023a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.784635 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q"] Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.785832 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" podUID="ce1ac899-0b49-4d41-b5a6-3272ef25023a" Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.817200 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.817415 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.817472 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:58.817457874 +0000 UTC m=+1124.388600785 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.915615 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l"] Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.924426 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz"] Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.928583 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppm67,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-97d456b9-8vn7l_openstack-operators(f43fd495-2325-4fc5-853a-2289e6398417): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 15 07:09:56 crc kubenswrapper[4876]: 
E1215 07:09:56.929996 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" podUID="f43fd495-2325-4fc5-853a-2289e6398417" Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.934386 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85c28239_78ab_4206_b204_8fa54acf2968.slice/crio-e2317783dfb87bef3288620cd469271205803277f56a82effddc8e36b4d0e7d2 WatchSource:0}: Error finding container e2317783dfb87bef3288620cd469271205803277f56a82effddc8e36b4d0e7d2: Status 404 returned error can't find the container with id e2317783dfb87bef3288620cd469271205803277f56a82effddc8e36b4d0e7d2 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.936937 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd"] Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.939587 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qk4pg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-68c649d9d-2mxzz_openstack-operators(85c28239-78ab-4206-b204-8fa54acf2968): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.940684 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" podUID="85c28239-78ab-4206-b204-8fa54acf2968" Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.942761 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod970d703f_35b5_41d2_b2b6_fae4c2fba825.slice/crio-471623c67f1c4a1de8a04ca07d94ca3b4f35f2bf0624887f26fa2a266d10b324 WatchSource:0}: Error finding container 471623c67f1c4a1de8a04ca07d94ca3b4f35f2bf0624887f26fa2a266d10b324: Status 404 returned error can't find the container with id 471623c67f1c4a1de8a04ca07d94ca3b4f35f2bf0624887f26fa2a266d10b324 Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.948015 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.948432 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d4d5d32_5a7d_454e_b86d_2513e278a9d5.slice/crio-f4a6b8fc3397fd8ffc3047dfc68a36701b196f74dd781007662da78ddd08c881 WatchSource:0}: Error finding container f4a6b8fc3397fd8ffc3047dfc68a36701b196f74dd781007662da78ddd08c881: Status 404 returned error can't find the container with id f4a6b8fc3397fd8ffc3047dfc68a36701b196f74dd781007662da78ddd08c881 Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.951439 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dpc4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-xmt78_openstack-operators(6d4d5d32-5a7d-454e-b86d-2513e278a9d5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.953584 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" podUID="6d4d5d32-5a7d-454e-b86d-2513e278a9d5" Dec 15 07:09:56 crc kubenswrapper[4876]: I1215 07:09:56.961642 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8"] Dec 15 07:09:56 crc kubenswrapper[4876]: W1215 07:09:56.962804 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda590cf12_4e90_44fe_be6c_b392c7e22208.slice/crio-6179c68c8be04f47dcb3769ee1d155a3f7d6101f9c89a75d6b4c508dbe06aba1 WatchSource:0}: Error finding container 6179c68c8be04f47dcb3769ee1d155a3f7d6101f9c89a75d6b4c508dbe06aba1: Status 404 returned error can't find the container with id 6179c68c8be04f47dcb3769ee1d155a3f7d6101f9c89a75d6b4c508dbe06aba1 Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.965188 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wlfmj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5fbbf8b6cc-9mmq8_openstack-operators(a590cf12-4e90-44fe-be6c-b392c7e22208): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 15 07:09:56 crc kubenswrapper[4876]: E1215 07:09:56.966332 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" podUID="a590cf12-4e90-44fe-be6c-b392c7e22208" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.323248 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.323764 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.326908 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.326952 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.327067 4876 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.327125 4876 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:59.327094265 +0000 UTC m=+1124.898237176 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "metrics-server-cert" not found Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.327393 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.327421 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:09:59.327414503 +0000 UTC m=+1124.898557414 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.678962 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" event={"ID":"cb583ab2-3f9a-42e3-afd3-630dd25eb152","Type":"ContainerStarted","Data":"bd1146a46ab915872ad7721d97f28d840a1bb8067b7e659dbdbdaeeeda1b3f4c"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.680681 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" event={"ID":"f4c838cf-5a9f-445d-b43b-788ced3037cf","Type":"ContainerStarted","Data":"d76cde65e6781905514417212b3db4258a0bf932023c18da82515f5ece27ef98"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.681913 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" event={"ID":"970d703f-35b5-41d2-b2b6-fae4c2fba825","Type":"ContainerStarted","Data":"471623c67f1c4a1de8a04ca07d94ca3b4f35f2bf0624887f26fa2a266d10b324"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.683488 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" event={"ID":"c9c3edf2-0545-4a2f-9b53-53317c8c028b","Type":"ContainerStarted","Data":"c177c9e260312e8fe5fcb75e3d5a383be68f5e2e062882d90b9de67a6673e4a8"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.685535 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" event={"ID":"ff12b926-5daa-4bd3-b49a-154d80442fa8","Type":"ContainerStarted","Data":"ff838eea5f8672cf05803e4c25a7d906d7885e6f160c256c6a396f8e478f0c25"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.688768 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" event={"ID":"6d4d5d32-5a7d-454e-b86d-2513e278a9d5","Type":"ContainerStarted","Data":"f4a6b8fc3397fd8ffc3047dfc68a36701b196f74dd781007662da78ddd08c881"} Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.693349 
4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" podUID="6d4d5d32-5a7d-454e-b86d-2513e278a9d5" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.698027 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" event={"ID":"2af1ad25-d6c3-402a-bbe2-78e0a900a6ca","Type":"ContainerStarted","Data":"552192d1984bab3d501244a83c1e1575f1e957a2d49f11b918066b25d9c55ac3"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.706119 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" event={"ID":"94aa3357-932f-4a07-8c32-32b357938142","Type":"ContainerStarted","Data":"798795e34ec8dc00f7333f542a1c0a44c609c381257841ce3d9721a297e7ba26"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.712698 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" event={"ID":"a590cf12-4e90-44fe-be6c-b392c7e22208","Type":"ContainerStarted","Data":"6179c68c8be04f47dcb3769ee1d155a3f7d6101f9c89a75d6b4c508dbe06aba1"} Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.714237 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" podUID="a590cf12-4e90-44fe-be6c-b392c7e22208" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.715092 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" event={"ID":"f589b271-7169-4562-80b9-35f85f4ecfec","Type":"ContainerStarted","Data":"3be64b51baf0bac14fe4c710c1d4a267105d3e637a36c2f16e7b5ebdcfc0622d"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.717655 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" event={"ID":"ce1ac899-0b49-4d41-b5a6-3272ef25023a","Type":"ContainerStarted","Data":"ebcf35c2a9262c38ce7b522754cfac868af634e9bb79b2d26573f37e124388b9"} Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.719399 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" podUID="ce1ac899-0b49-4d41-b5a6-3272ef25023a" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.723279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" event={"ID":"7bcbfa3a-5871-44db-b07b-2687a995b87f","Type":"ContainerStarted","Data":"473e0c43ee6213f8800a87e26bde886fc43948484a97bfdd19c0280b8b307b93"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.740919 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" event={"ID":"f43fd495-2325-4fc5-853a-2289e6398417","Type":"ContainerStarted","Data":"fe8e5ce795839457b1e9f17c2a58eaa5351670189d042e777ebac4c6f001e58b"} Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.763171 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" podUID="f43fd495-2325-4fc5-853a-2289e6398417" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.765138 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" event={"ID":"6f2624c6-7e76-4413-b586-246d5cab2346","Type":"ContainerStarted","Data":"ac826ee2f6dd0ef9703e5493f16e81770c28f9d87e50b594b50c0cc8bfb8cf28"} Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.775214 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" event={"ID":"85c28239-78ab-4206-b204-8fa54acf2968","Type":"ContainerStarted","Data":"e2317783dfb87bef3288620cd469271205803277f56a82effddc8e36b4d0e7d2"} Dec 15 07:09:57 crc kubenswrapper[4876]: E1215 07:09:57.778404 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" podUID="85c28239-78ab-4206-b204-8fa54acf2968" Dec 15 07:09:57 crc kubenswrapper[4876]: I1215 07:09:57.779888 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" event={"ID":"5a8894cb-9bb5-403e-9003-28e8f771f142","Type":"ContainerStarted","Data":"7b8db6629c9e9b3511d28ce97aebc16e1d1643849c0bd1c3e8f727adae1c4dbc"} Dec 15 07:09:58 crc kubenswrapper[4876]: I1215 07:09:58.655039 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.655238 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.655296 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:10:02.65527879 +0000 UTC m=+1128.226421701 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.794078 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" podUID="6d4d5d32-5a7d-454e-b86d-2513e278a9d5" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.794142 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" podUID="a590cf12-4e90-44fe-be6c-b392c7e22208" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.794677 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" podUID="85c28239-78ab-4206-b204-8fa54acf2968" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.796684 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" podUID="f43fd495-2325-4fc5-853a-2289e6398417" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.798784 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" podUID="ce1ac899-0b49-4d41-b5a6-3272ef25023a" Dec 15 07:09:58 crc kubenswrapper[4876]: I1215 07:09:58.858825 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.858946 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:58 crc kubenswrapper[4876]: E1215 07:09:58.858989 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert 
podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:02.85897459 +0000 UTC m=+1128.430117491 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:09:59 crc kubenswrapper[4876]: I1215 07:09:59.365369 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:59 crc kubenswrapper[4876]: I1215 07:09:59.365462 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:09:59 crc kubenswrapper[4876]: E1215 07:09:59.365515 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:09:59 crc kubenswrapper[4876]: E1215 07:09:59.365596 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:03.365577696 +0000 UTC m=+1128.936720607 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:09:59 crc kubenswrapper[4876]: E1215 07:09:59.365865 4876 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 15 07:09:59 crc kubenswrapper[4876]: E1215 07:09:59.365971 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:03.365936576 +0000 UTC m=+1128.937079487 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "metrics-server-cert" not found Dec 15 07:10:02 crc kubenswrapper[4876]: I1215 07:10:02.724875 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:02 crc kubenswrapper[4876]: E1215 07:10:02.725069 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:10:02 crc kubenswrapper[4876]: E1215 07:10:02.725396 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:10:10.725379509 +0000 UTC m=+1136.296522420 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:10:02 crc kubenswrapper[4876]: I1215 07:10:02.927833 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:02 crc kubenswrapper[4876]: E1215 07:10:02.928079 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:10:02 crc kubenswrapper[4876]: E1215 07:10:02.928312 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:10.928274257 +0000 UTC m=+1136.499417168 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:10:03 crc kubenswrapper[4876]: I1215 07:10:03.435193 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:03 crc kubenswrapper[4876]: I1215 07:10:03.435247 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:03 crc kubenswrapper[4876]: E1215 07:10:03.435383 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:10:03 crc kubenswrapper[4876]: E1215 07:10:03.435396 4876 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 15 07:10:03 crc kubenswrapper[4876]: E1215 07:10:03.435452 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:11.435435108 +0000 UTC m=+1137.006578019 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "metrics-server-cert" not found Dec 15 07:10:03 crc kubenswrapper[4876]: E1215 07:10:03.435465 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:11.435459798 +0000 UTC m=+1137.006602819 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:10:08 crc kubenswrapper[4876]: E1215 07:10:08.778251 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 15 07:10:08 crc kubenswrapper[4876]: E1215 07:10:08.778906 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rsqmf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-7cd87b778f-v56p5_openstack-operators(f589b271-7169-4562-80b9-35f85f4ecfec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:08 crc kubenswrapper[4876]: E1215 07:10:08.780403 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" 
podUID="f589b271-7169-4562-80b9-35f85f4ecfec" Dec 15 07:10:08 crc kubenswrapper[4876]: E1215 07:10:08.904420 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" podUID="f589b271-7169-4562-80b9-35f85f4ecfec" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.417874 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.418053 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mjb62,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-59b8dcb766-8k4s2_openstack-operators(c9c3edf2-0545-4a2f-9b53-53317c8c028b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.419191 4876 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" podUID="c9c3edf2-0545-4a2f-9b53-53317c8c028b" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.910207 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429\\\"\"" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" podUID="c9c3edf2-0545-4a2f-9b53-53317c8c028b" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.920412 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.920575 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vpzlf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-767f9d7567-tmckd_openstack-operators(6f2624c6-7e76-4413-b586-246d5cab2346): ErrImagePull: 
rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:09 crc kubenswrapper[4876]: E1215 07:10:09.922020 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" podUID="6f2624c6-7e76-4413-b586-246d5cab2346" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.457847 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.458311 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7gghb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-8665b56d78-8br7q_openstack-operators(5a8894cb-9bb5-403e-9003-28e8f771f142): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.459622 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" podUID="5a8894cb-9bb5-403e-9003-28e8f771f142" Dec 15 07:10:10 crc kubenswrapper[4876]: I1215 07:10:10.782001 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.782874 4876 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.782928 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert podName:ab87f935-876b-42b5-9eb9-85092cd8068f nodeName:}" failed. No retries permitted until 2025-12-15 07:10:26.782911747 +0000 UTC m=+1152.354054658 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert") pod "infra-operator-controller-manager-58944d7758-m5r6k" (UID: "ab87f935-876b-42b5-9eb9-85092cd8068f") : secret "infra-operator-webhook-server-cert" not found Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.917178 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027\\\"\"" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" podUID="6f2624c6-7e76-4413-b586-246d5cab2346" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.917406 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\"" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" podUID="5a8894cb-9bb5-403e-9003-28e8f771f142" Dec 15 07:10:10 crc kubenswrapper[4876]: I1215 07:10:10.984301 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.984457 4876 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:10:10 crc kubenswrapper[4876]: E1215 07:10:10.984509 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert podName:e7ea8ece-c3d4-46bb-9255-6021533983c4 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:26.984495638 +0000 UTC m=+1152.555638549 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert") pod "openstack-baremetal-operator-controller-manager-689f887b54x442j" (UID: "e7ea8ece-c3d4-46bb-9255-6021533983c4") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.233611 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.233821 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-62gjb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-6ccf486b9-ndjnl_openstack-operators(7bcbfa3a-5871-44db-b07b-2687a995b87f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.235882 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" podUID="7bcbfa3a-5871-44db-b07b-2687a995b87f" Dec 15 07:10:11 crc kubenswrapper[4876]: I1215 07:10:11.494572 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:11 crc kubenswrapper[4876]: I1215 07:10:11.494634 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.494994 4876 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.495065 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs podName:3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8 nodeName:}" failed. No retries permitted until 2025-12-15 07:10:27.495049154 +0000 UTC m=+1153.066192065 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs") pod "openstack-operator-controller-manager-56f6fbdf6-824mz" (UID: "3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8") : secret "webhook-server-cert" not found Dec 15 07:10:11 crc kubenswrapper[4876]: I1215 07:10:11.500903 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-metrics-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:11 crc kubenswrapper[4876]: E1215 07:10:11.921591 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" podUID="7bcbfa3a-5871-44db-b07b-2687a995b87f" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.152775 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.153545 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppxrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5c6df8f9-cj8nd_openstack-operators(970d703f-35b5-41d2-b2b6-fae4c2fba825): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.155426 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" podUID="970d703f-35b5-41d2-b2b6-fae4c2fba825" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.927928 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" podUID="970d703f-35b5-41d2-b2b6-fae4c2fba825" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.994649 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.994848 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k9n8s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-f76f4954c-7qzqq_openstack-operators(cb583ab2-3f9a-42e3-afd3-630dd25eb152): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:12 crc kubenswrapper[4876]: E1215 07:10:12.996237 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" podUID="cb583ab2-3f9a-42e3-afd3-630dd25eb152" Dec 15 07:10:13 crc kubenswrapper[4876]: E1215 07:10:13.936340 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" podUID="cb583ab2-3f9a-42e3-afd3-630dd25eb152" Dec 15 07:10:15 crc kubenswrapper[4876]: E1215 07:10:15.316013 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 15 07:10:15 crc kubenswrapper[4876]: E1215 07:10:15.316238 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jz7hx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-bf6d4f946-dh78v_openstack-operators(94aa3357-932f-4a07-8c32-32b357938142): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:15 crc kubenswrapper[4876]: E1215 07:10:15.317438 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" podUID="94aa3357-932f-4a07-8c32-32b357938142" Dec 15 07:10:15 crc kubenswrapper[4876]: E1215 07:10:15.945834 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" 
podUID="94aa3357-932f-4a07-8c32-32b357938142" Dec 15 07:10:16 crc kubenswrapper[4876]: E1215 07:10:16.433948 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 15 07:10:16 crc kubenswrapper[4876]: E1215 07:10:16.434166 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kss7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5c7cbf548f-454ft_openstack-operators(ff12b926-5daa-4bd3-b49a-154d80442fa8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:10:16 crc kubenswrapper[4876]: E1215 07:10:16.435345 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" podUID="ff12b926-5daa-4bd3-b49a-154d80442fa8" Dec 15 07:10:16 crc kubenswrapper[4876]: E1215 07:10:16.960376 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off 
pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" podUID="ff12b926-5daa-4bd3-b49a-154d80442fa8" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.968472 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" event={"ID":"2af1ad25-d6c3-402a-bbe2-78e0a900a6ca","Type":"ContainerStarted","Data":"4e2ffd1d4994b743fbb10997d745472f0ab6f7c8ff98cda3d948170ba658940b"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.968888 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.969925 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" event={"ID":"f43fd495-2325-4fc5-853a-2289e6398417","Type":"ContainerStarted","Data":"7720927cdf1f8795318ca609185eaf6b8ec258b73e15c5cb5064b849e40fdd56"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.970372 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.971662 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" event={"ID":"70e313c3-63de-489b-bf09-6072d26ad862","Type":"ContainerStarted","Data":"61f4846ecdf051e53e1fe629c1f2ca9f69f8fa08357dce55b1e8bfd6530c01ec"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.971972 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.973258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" event={"ID":"cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87","Type":"ContainerStarted","Data":"9389d0f0a8f261cebecfa7c9c68722814823ac59b8a152a2969f2dec34262e0e"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.973597 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.975170 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" event={"ID":"7bc70792-76af-4d00-a2b2-2fdd6b89be16","Type":"ContainerStarted","Data":"cbaad2aab0244445b85dbdfc13981cce6fba658707da31c253b5b22ac679fb78"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.975522 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.977882 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" event={"ID":"ce1ac899-0b49-4d41-b5a6-3272ef25023a","Type":"ContainerStarted","Data":"037eeed6a185f1f7a0088daa9728946c4b9eeb5e47b7d3ee0ca620bc80f0fd69"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.978308 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.979460 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" event={"ID":"6d4d5d32-5a7d-454e-b86d-2513e278a9d5","Type":"ContainerStarted","Data":"812df93950d3ee74c210347180336a8fb25b060be90c247eff269864e865146b"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.980665 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" event={"ID":"4b7cafd3-81e2-419f-879e-5761126af781","Type":"ContainerStarted","Data":"5cf804006f96dd6a26342d8400fe6aa5f58ab2230b5e5e07fbdd81db3df5c03b"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.980984 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.982026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" event={"ID":"f4c838cf-5a9f-445d-b43b-788ced3037cf","Type":"ContainerStarted","Data":"b527064cfb5eae114bb31926d754830aa69b73f86e2e898e274a3d98298dda48"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.982376 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.983363 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" event={"ID":"85c28239-78ab-4206-b204-8fa54acf2968","Type":"ContainerStarted","Data":"d61b6f88d4724974884b671337ef9e8980aa8e80305b5e524a58338497d5a7f6"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.983766 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.984761 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" event={"ID":"a590cf12-4e90-44fe-be6c-b392c7e22208","Type":"ContainerStarted","Data":"b09ac53a61b9c51364e3a195fa62aa93f0372744d0aefb502a9b7adb71db893a"} Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.985215 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:10:19 crc kubenswrapper[4876]: I1215 07:10:19.988988 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" podStartSLOduration=2.7871570329999997 podStartE2EDuration="24.988969205s" podCreationTimestamp="2025-12-15 07:09:55 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.777258985 +0000 UTC m=+1122.348401896" lastFinishedPulling="2025-12-15 07:10:18.979071157 +0000 UTC m=+1144.550214068" observedRunningTime="2025-12-15 07:10:19.983943915 +0000 UTC m=+1145.555086826" watchObservedRunningTime="2025-12-15 07:10:19.988969205 +0000 UTC m=+1145.560112116" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.007848 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" 
podStartSLOduration=3.376550883 podStartE2EDuration="26.007831985s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.347717373 +0000 UTC m=+1121.918860284" lastFinishedPulling="2025-12-15 07:10:18.978998475 +0000 UTC m=+1144.550141386" observedRunningTime="2025-12-15 07:10:20.004170652 +0000 UTC m=+1145.575313563" watchObservedRunningTime="2025-12-15 07:10:20.007831985 +0000 UTC m=+1145.578974896" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.017677 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-xmt78" podStartSLOduration=2.39243897 podStartE2EDuration="25.017659451s" podCreationTimestamp="2025-12-15 07:09:55 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.951020064 +0000 UTC m=+1122.522162975" lastFinishedPulling="2025-12-15 07:10:19.576240545 +0000 UTC m=+1145.147383456" observedRunningTime="2025-12-15 07:10:20.015097429 +0000 UTC m=+1145.586240340" watchObservedRunningTime="2025-12-15 07:10:20.017659451 +0000 UTC m=+1145.588802352" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.027180 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" podStartSLOduration=2.314631373 podStartE2EDuration="25.027165077s" podCreationTimestamp="2025-12-15 07:09:55 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.784388015 +0000 UTC m=+1122.355530936" lastFinishedPulling="2025-12-15 07:10:19.496921729 +0000 UTC m=+1145.068064640" observedRunningTime="2025-12-15 07:10:20.026234962 +0000 UTC m=+1145.597377863" watchObservedRunningTime="2025-12-15 07:10:20.027165077 +0000 UTC m=+1145.598307988" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.138779 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" podStartSLOduration=3.842683392 podStartE2EDuration="26.138762231s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.682956757 +0000 UTC m=+1122.254099668" lastFinishedPulling="2025-12-15 07:10:18.979035596 +0000 UTC m=+1144.550178507" observedRunningTime="2025-12-15 07:10:20.137426684 +0000 UTC m=+1145.708569605" watchObservedRunningTime="2025-12-15 07:10:20.138762231 +0000 UTC m=+1145.709905152" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.198181 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" podStartSLOduration=4.189717117 podStartE2EDuration="26.19816084s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:55.863493986 +0000 UTC m=+1121.434636897" lastFinishedPulling="2025-12-15 07:10:17.871937709 +0000 UTC m=+1143.443080620" observedRunningTime="2025-12-15 07:10:20.194058924 +0000 UTC m=+1145.765201845" watchObservedRunningTime="2025-12-15 07:10:20.19816084 +0000 UTC m=+1145.769303751" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.221115 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" podStartSLOduration=3.744022501 podStartE2EDuration="26.221089933s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.501966804 +0000 UTC m=+1122.073109715" lastFinishedPulling="2025-12-15 07:10:18.979034236 +0000 UTC 
m=+1144.550177147" observedRunningTime="2025-12-15 07:10:20.216480934 +0000 UTC m=+1145.787623845" watchObservedRunningTime="2025-12-15 07:10:20.221089933 +0000 UTC m=+1145.792232844" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.232996 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" podStartSLOduration=2.695015284 podStartE2EDuration="25.232980086s" podCreationTimestamp="2025-12-15 07:09:55 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.92844911 +0000 UTC m=+1122.499592021" lastFinishedPulling="2025-12-15 07:10:19.466413912 +0000 UTC m=+1145.037556823" observedRunningTime="2025-12-15 07:10:20.228077769 +0000 UTC m=+1145.799220690" watchObservedRunningTime="2025-12-15 07:10:20.232980086 +0000 UTC m=+1145.804122997" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.247618 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" podStartSLOduration=2.663055479 podStartE2EDuration="26.247601818s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:55.862231941 +0000 UTC m=+1121.433374852" lastFinishedPulling="2025-12-15 07:10:19.44677828 +0000 UTC m=+1145.017921191" observedRunningTime="2025-12-15 07:10:20.244591943 +0000 UTC m=+1145.815734854" watchObservedRunningTime="2025-12-15 07:10:20.247601818 +0000 UTC m=+1145.818744729" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.361041 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" podStartSLOduration=3.846716725 podStartE2EDuration="26.361017482s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.939030858 +0000 UTC m=+1122.510173769" lastFinishedPulling="2025-12-15 07:10:19.453331615 +0000 UTC m=+1145.024474526" observedRunningTime="2025-12-15 07:10:20.300115062 +0000 UTC m=+1145.871257973" watchObservedRunningTime="2025-12-15 07:10:20.361017482 +0000 UTC m=+1145.932160403" Dec 15 07:10:20 crc kubenswrapper[4876]: I1215 07:10:20.364664 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" podStartSLOduration=3.855884693 podStartE2EDuration="26.364653544s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.965067449 +0000 UTC m=+1122.536210360" lastFinishedPulling="2025-12-15 07:10:19.4738363 +0000 UTC m=+1145.044979211" observedRunningTime="2025-12-15 07:10:20.361549617 +0000 UTC m=+1145.932692528" watchObservedRunningTime="2025-12-15 07:10:20.364653544 +0000 UTC m=+1145.935796445" Dec 15 07:10:24 crc kubenswrapper[4876]: I1215 07:10:24.935404 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-95949466-kdbsr" Dec 15 07:10:24 crc kubenswrapper[4876]: I1215 07:10:24.965852 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-5cf45c46bd-djwl2" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.044531 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" 
event={"ID":"f589b271-7169-4562-80b9-35f85f4ecfec","Type":"ContainerStarted","Data":"bc83e6b0f46ff07ea493c5758458647a3e111ca0e0d8d2b9566df252e5a34f28"} Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.045082 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.046244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" event={"ID":"6f2624c6-7e76-4413-b586-246d5cab2346","Type":"ContainerStarted","Data":"ece8d43b33c482a7f7552bbe9bf2bb0d41556bf3a41c55aa6ef535d780ca39d1"} Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.046429 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.065869 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" podStartSLOduration=3.585360196 podStartE2EDuration="31.065848374s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.762289294 +0000 UTC m=+1122.333432195" lastFinishedPulling="2025-12-15 07:10:24.242777462 +0000 UTC m=+1149.813920373" observedRunningTime="2025-12-15 07:10:25.061956925 +0000 UTC m=+1150.633099836" watchObservedRunningTime="2025-12-15 07:10:25.065848374 +0000 UTC m=+1150.636991285" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.085176 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" podStartSLOduration=3.41813215 podStartE2EDuration="31.085158176s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.668383287 +0000 UTC m=+1122.239526188" lastFinishedPulling="2025-12-15 07:10:24.335409313 +0000 UTC m=+1149.906552214" observedRunningTime="2025-12-15 07:10:25.079898318 +0000 UTC m=+1150.651041249" watchObservedRunningTime="2025-12-15 07:10:25.085158176 +0000 UTC m=+1150.656301087" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.094723 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-fjmj4" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.161933 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-f458558d7-pcz5j" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.182998 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5fdd9786f7-b6nw6" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.484972 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2mxzz" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.505033 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-9mmq8" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.651806 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-97d456b9-8vn7l" Dec 15 07:10:25 crc 
kubenswrapper[4876]: I1215 07:10:25.708441 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-756ccf86c7-htkc7" Dec 15 07:10:25 crc kubenswrapper[4876]: I1215 07:10:25.774794 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-55f78b7c4c-vm8xh" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.054032 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" event={"ID":"c9c3edf2-0545-4a2f-9b53-53317c8c028b","Type":"ContainerStarted","Data":"4665860c46c7ca24ba571d395f677e4500d556d63b302e83ddba995defe527c4"} Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.055132 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.078376 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" podStartSLOduration=3.54916708 podStartE2EDuration="32.078359605s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.701985011 +0000 UTC m=+1122.273127922" lastFinishedPulling="2025-12-15 07:10:25.231177526 +0000 UTC m=+1150.802320447" observedRunningTime="2025-12-15 07:10:26.076089802 +0000 UTC m=+1151.647232733" watchObservedRunningTime="2025-12-15 07:10:26.078359605 +0000 UTC m=+1151.649502516" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.819061 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.827276 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab87f935-876b-42b5-9eb9-85092cd8068f-cert\") pod \"infra-operator-controller-manager-58944d7758-m5r6k\" (UID: \"ab87f935-876b-42b5-9eb9-85092cd8068f\") " pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.876172 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-bp574" Dec 15 07:10:26 crc kubenswrapper[4876]: I1215 07:10:26.883776 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.023331 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.027684 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7ea8ece-c3d4-46bb-9255-6021533983c4-cert\") pod \"openstack-baremetal-operator-controller-manager-689f887b54x442j\" (UID: \"e7ea8ece-c3d4-46bb-9255-6021533983c4\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.036408 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-xn4m6" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.044688 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.317739 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j"] Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.322315 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.322349 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.322388 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.323052 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.323125 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2" gracePeriod=600 Dec 15 07:10:27 crc kubenswrapper[4876]: W1215 07:10:27.328772 4876 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7ea8ece_c3d4_46bb_9255_6021533983c4.slice/crio-c365628da4dbe5f77a51854f5d6c52672d1e564fab5b5365347882a8a8b87357 WatchSource:0}: Error finding container c365628da4dbe5f77a51854f5d6c52672d1e564fab5b5365347882a8a8b87357: Status 404 returned error can't find the container with id c365628da4dbe5f77a51854f5d6c52672d1e564fab5b5365347882a8a8b87357 Dec 15 07:10:27 crc kubenswrapper[4876]: W1215 07:10:27.353843 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab87f935_876b_42b5_9eb9_85092cd8068f.slice/crio-d13d555e56705c349a7b2841ff19d6829c78c97a7fac5bf1f1519029d4071839 WatchSource:0}: Error finding container d13d555e56705c349a7b2841ff19d6829c78c97a7fac5bf1f1519029d4071839: Status 404 returned error can't find the container with id d13d555e56705c349a7b2841ff19d6829c78c97a7fac5bf1f1519029d4071839 Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.353955 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k"] Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.530929 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.543345 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8-webhook-certs\") pod \"openstack-operator-controller-manager-56f6fbdf6-824mz\" (UID: \"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8\") " pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.688587 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-fzw6h" Dec 15 07:10:27 crc kubenswrapper[4876]: I1215 07:10:27.696171 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.073717 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" event={"ID":"5a8894cb-9bb5-403e-9003-28e8f771f142","Type":"ContainerStarted","Data":"ae1d174d47846d0c4fa1abf51e0afd4e34dca8b7f85f868458f7b4ca286ff270"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.074545 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.076897 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2" exitCode=0 Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.076982 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.077008 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.077048 4876 scope.go:117] "RemoveContainer" containerID="3a4b809547f569825abd99f382a2c171e3307d4226f6eeeddde69774b94f79df" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.083488 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" event={"ID":"7bcbfa3a-5871-44db-b07b-2687a995b87f","Type":"ContainerStarted","Data":"c9cc831a5cd218e91d884932ccb50a6a64870ca01a48391d9b32282e6220d314"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.083684 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.085575 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" event={"ID":"e7ea8ece-c3d4-46bb-9255-6021533983c4","Type":"ContainerStarted","Data":"c365628da4dbe5f77a51854f5d6c52672d1e564fab5b5365347882a8a8b87357"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.087059 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" event={"ID":"970d703f-35b5-41d2-b2b6-fae4c2fba825","Type":"ContainerStarted","Data":"0063d2b5447d40eddadf440a53d1c6f10e609fabad6b48b1cdc767fc1bdbf237"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.087162 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.092002 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" 
event={"ID":"ab87f935-876b-42b5-9eb9-85092cd8068f","Type":"ContainerStarted","Data":"d13d555e56705c349a7b2841ff19d6829c78c97a7fac5bf1f1519029d4071839"} Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.102486 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" podStartSLOduration=3.393210442 podStartE2EDuration="34.102467633s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.779476568 +0000 UTC m=+1122.350619479" lastFinishedPulling="2025-12-15 07:10:27.488733729 +0000 UTC m=+1153.059876670" observedRunningTime="2025-12-15 07:10:28.087718658 +0000 UTC m=+1153.658861569" watchObservedRunningTime="2025-12-15 07:10:28.102467633 +0000 UTC m=+1153.673610564" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.108390 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" podStartSLOduration=3.636926185 podStartE2EDuration="34.108368378s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.683563384 +0000 UTC m=+1122.254706285" lastFinishedPulling="2025-12-15 07:10:27.155005567 +0000 UTC m=+1152.726148478" observedRunningTime="2025-12-15 07:10:28.100131447 +0000 UTC m=+1153.671274388" watchObservedRunningTime="2025-12-15 07:10:28.108368378 +0000 UTC m=+1153.679511289" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.138967 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" podStartSLOduration=3.873834587 podStartE2EDuration="34.138948637s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.949513742 +0000 UTC m=+1122.520656653" lastFinishedPulling="2025-12-15 07:10:27.214627792 +0000 UTC m=+1152.785770703" observedRunningTime="2025-12-15 07:10:28.135126649 +0000 UTC m=+1153.706269570" watchObservedRunningTime="2025-12-15 07:10:28.138948637 +0000 UTC m=+1153.710091548" Dec 15 07:10:28 crc kubenswrapper[4876]: I1215 07:10:28.173364 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz"] Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.115980 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" event={"ID":"cb583ab2-3f9a-42e3-afd3-630dd25eb152","Type":"ContainerStarted","Data":"db0a830c89e147e05c794874d69a55915b0427b8bd3b42d3878c34545be7a268"} Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.116788 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.118767 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" event={"ID":"94aa3357-932f-4a07-8c32-32b357938142","Type":"ContainerStarted","Data":"c2f1ba25aea3b97ea954b29697b90b07aeb42ffefeeebd6333f2c972875e6354"} Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.119141 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.123166 4876 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" event={"ID":"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8","Type":"ContainerStarted","Data":"f4c7cf8214a3d56e8e94bc2928be0ebbb1947f48dc0c8d56baa36c2f7594ca6e"} Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.123195 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.123204 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" event={"ID":"3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8","Type":"ContainerStarted","Data":"aba81f58d516df59ac3164d5afb545bd9b98a8835138cff9b7ed9bdee3b9e0f1"} Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.134392 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" podStartSLOduration=3.46476605 podStartE2EDuration="35.134378139s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.782754249 +0000 UTC m=+1122.353897160" lastFinishedPulling="2025-12-15 07:10:28.452366348 +0000 UTC m=+1154.023509249" observedRunningTime="2025-12-15 07:10:29.130767377 +0000 UTC m=+1154.701910288" watchObservedRunningTime="2025-12-15 07:10:29.134378139 +0000 UTC m=+1154.705521060" Dec 15 07:10:29 crc kubenswrapper[4876]: I1215 07:10:29.146257 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" podStartSLOduration=3.479615386 podStartE2EDuration="35.146238141s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.783053437 +0000 UTC m=+1122.354196348" lastFinishedPulling="2025-12-15 07:10:28.449676192 +0000 UTC m=+1154.020819103" observedRunningTime="2025-12-15 07:10:29.143813134 +0000 UTC m=+1154.714956065" watchObservedRunningTime="2025-12-15 07:10:29.146238141 +0000 UTC m=+1154.717381052" Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.139776 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" event={"ID":"e7ea8ece-c3d4-46bb-9255-6021533983c4","Type":"ContainerStarted","Data":"e2679ce8cb699946888986c1da63beae289270605c6e804a6bac51f368cb3a56"} Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.139923 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.143181 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" event={"ID":"ab87f935-876b-42b5-9eb9-85092cd8068f","Type":"ContainerStarted","Data":"fef531d5cbb9aa64afb8483f5290c3a4f432277e22e5ba1e651cc5638228c568"} Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.144516 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.169491 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" podStartSLOduration=33.696530253 podStartE2EDuration="37.169471764s" 
podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:10:27.330738482 +0000 UTC m=+1152.901881393" lastFinishedPulling="2025-12-15 07:10:30.803679993 +0000 UTC m=+1156.374822904" observedRunningTime="2025-12-15 07:10:31.166790139 +0000 UTC m=+1156.737933080" watchObservedRunningTime="2025-12-15 07:10:31.169471764 +0000 UTC m=+1156.740614685" Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.171427 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" podStartSLOduration=36.171417499 podStartE2EDuration="36.171417499s" podCreationTimestamp="2025-12-15 07:09:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:10:29.187315146 +0000 UTC m=+1154.758458067" watchObservedRunningTime="2025-12-15 07:10:31.171417499 +0000 UTC m=+1156.742560410" Dec 15 07:10:31 crc kubenswrapper[4876]: I1215 07:10:31.202323 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" podStartSLOduration=33.759179523 podStartE2EDuration="37.202303757s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:10:27.3637996 +0000 UTC m=+1152.934942511" lastFinishedPulling="2025-12-15 07:10:30.806923834 +0000 UTC m=+1156.378066745" observedRunningTime="2025-12-15 07:10:31.193693945 +0000 UTC m=+1156.764836866" watchObservedRunningTime="2025-12-15 07:10:31.202303757 +0000 UTC m=+1156.773446668" Dec 15 07:10:33 crc kubenswrapper[4876]: I1215 07:10:33.177295 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" event={"ID":"ff12b926-5daa-4bd3-b49a-154d80442fa8","Type":"ContainerStarted","Data":"f4769c87ba605f0c565613101159b7418b1d05741086fa4ab5dcd75bf417a77f"} Dec 15 07:10:33 crc kubenswrapper[4876]: I1215 07:10:33.177511 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:10:33 crc kubenswrapper[4876]: I1215 07:10:33.202571 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" podStartSLOduration=3.3603581670000002 podStartE2EDuration="39.202546103s" podCreationTimestamp="2025-12-15 07:09:54 +0000 UTC" firstStartedPulling="2025-12-15 07:09:56.6474775 +0000 UTC m=+1122.218620411" lastFinishedPulling="2025-12-15 07:10:32.489665396 +0000 UTC m=+1158.060808347" observedRunningTime="2025-12-15 07:10:33.196842843 +0000 UTC m=+1158.767985754" watchObservedRunningTime="2025-12-15 07:10:33.202546103 +0000 UTC m=+1158.773689064" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.066813 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-59b8dcb766-8k4s2" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.067087 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-767f9d7567-tmckd" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.104092 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6ccf486b9-ndjnl" Dec 15 07:10:35 crc 
kubenswrapper[4876]: I1215 07:10:35.477447 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-v56p5" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.493793 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-f76f4954c-7qzqq" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.542740 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-dh78v" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.545569 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-8665b56d78-8br7q" Dec 15 07:10:35 crc kubenswrapper[4876]: I1215 07:10:35.601616 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5c6df8f9-cj8nd" Dec 15 07:10:36 crc kubenswrapper[4876]: I1215 07:10:36.891689 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-58944d7758-m5r6k" Dec 15 07:10:37 crc kubenswrapper[4876]: I1215 07:10:37.052292 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-689f887b54x442j" Dec 15 07:10:37 crc kubenswrapper[4876]: I1215 07:10:37.705804 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-56f6fbdf6-824mz" Dec 15 07:10:45 crc kubenswrapper[4876]: I1215 07:10:45.431247 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5c7cbf548f-454ft" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.180230 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.181732 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.184495 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.184511 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.184523 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.184910 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-xmdk6" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.201927 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.228851 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.230190 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.231808 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.239987 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.275118 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wncf\" (UniqueName: \"kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.275253 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.376341 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.376405 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd8wt\" (UniqueName: \"kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.376481 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.376525 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.376564 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wncf\" (UniqueName: \"kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.377671 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" 
Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.398516 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wncf\" (UniqueName: \"kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf\") pod \"dnsmasq-dns-84bb9d8bd9-2xhbs\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.477865 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.478042 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.478156 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd8wt\" (UniqueName: \"kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.480017 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.480136 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.496572 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.500175 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd8wt\" (UniqueName: \"kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt\") pod \"dnsmasq-dns-5f854695bc-zk5h5\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.551159 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.959583 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:10:58 crc kubenswrapper[4876]: I1215 07:10:58.997662 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:10:59 crc kubenswrapper[4876]: W1215 07:10:59.001075 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb904ce6e_76b3_4402_827f_501b94d7b515.slice/crio-9fe054e97a40a23cce49d96ce7e151bea5fc9057d04e4594c0b3f5ba9b6d2e48 WatchSource:0}: Error finding container 9fe054e97a40a23cce49d96ce7e151bea5fc9057d04e4594c0b3f5ba9b6d2e48: Status 404 returned error can't find the container with id 9fe054e97a40a23cce49d96ce7e151bea5fc9057d04e4594c0b3f5ba9b6d2e48 Dec 15 07:10:59 crc kubenswrapper[4876]: I1215 07:10:59.367304 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" event={"ID":"b904ce6e-76b3-4402-827f-501b94d7b515","Type":"ContainerStarted","Data":"9fe054e97a40a23cce49d96ce7e151bea5fc9057d04e4594c0b3f5ba9b6d2e48"} Dec 15 07:10:59 crc kubenswrapper[4876]: I1215 07:10:59.368944 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" event={"ID":"e825f92c-ac92-4044-a359-ec6ed204957b","Type":"ContainerStarted","Data":"4c4f37d241b8cd4799bf9141d025dbb3553e5c0a63f21e5e13222bffdb4099c1"} Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.195325 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.219716 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.222647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.222705 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.222734 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gsl5\" (UniqueName: \"kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.222847 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.229233 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.332436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.332515 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.332553 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gsl5\" (UniqueName: \"kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.333677 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.334179 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.371449 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gsl5\" (UniqueName: \"kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5\") pod \"dnsmasq-dns-744ffd65bc-xvmw7\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.574555 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.590042 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.609514 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.613547 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.642779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.643558 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zb6v\" (UniqueName: \"kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.643627 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.643731 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.745098 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.745178 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zb6v\" (UniqueName: \"kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.745205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.746434 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.746497 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.770568 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zb6v\" (UniqueName: 
\"kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v\") pod \"dnsmasq-dns-95f5f6995-5l6xc\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:01 crc kubenswrapper[4876]: I1215 07:11:01.989427 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.165152 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:11:02 crc kubenswrapper[4876]: W1215 07:11:02.177263 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb4f16e_3e56_422b_8a47_087e3f7fb2d4.slice/crio-32090e38847bb775a1002c51bbed517b7cb063ba4136e6b7084bbc3bdf6c99cc WatchSource:0}: Error finding container 32090e38847bb775a1002c51bbed517b7cb063ba4136e6b7084bbc3bdf6c99cc: Status 404 returned error can't find the container with id 32090e38847bb775a1002c51bbed517b7cb063ba4136e6b7084bbc3bdf6c99cc Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.383230 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.385450 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.388545 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.388675 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-r8sv9" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.388846 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.389004 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.389921 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.390053 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.390205 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.401301 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.442486 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" event={"ID":"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4","Type":"ContainerStarted","Data":"32090e38847bb775a1002c51bbed517b7cb063ba4136e6b7084bbc3bdf6c99cc"} Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.466956 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:02 crc kubenswrapper[4876]: W1215 07:11:02.473470 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b653a98_0379_49c3_afc5_3ab4901d742e.slice/crio-2b9351b1f40f480e18cb79d70ab43f161535943160d70cb161a7288dafc25c78 
WatchSource:0}: Error finding container 2b9351b1f40f480e18cb79d70ab43f161535943160d70cb161a7288dafc25c78: Status 404 returned error can't find the container with id 2b9351b1f40f480e18cb79d70ab43f161535943160d70cb161a7288dafc25c78 Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557342 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557416 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557467 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557491 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557520 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557544 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557565 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557609 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m725c\" (UniqueName: 
\"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557676 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.557696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659210 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659265 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m725c\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659299 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659312 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659340 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659370 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659403 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 
crc kubenswrapper[4876]: I1215 07:11:02.659418 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659452 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.659469 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.660542 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.660997 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.661119 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.661315 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.661634 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.662048 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf\") pod \"rabbitmq-server-0\" 
(UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.669361 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.682801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.685380 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.699450 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.709090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m725c\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.718491 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.745898 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.776531 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.779352 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.779476 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.787290 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.789152 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.789358 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.789468 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-n676z" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.789571 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.789669 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.790247 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.963982 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964389 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964425 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964469 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964495 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964625 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964689 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.964738 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.965142 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtrfs\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.965189 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:02 crc kubenswrapper[4876]: I1215 07:11:02.965227 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066744 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066778 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066840 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066879 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066930 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066966 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.066998 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.067044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtrfs\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.067076 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.067126 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.067707 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.068278 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") 
" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.068800 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.068969 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.069220 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.070391 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.072452 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.072636 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.077809 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.085881 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.086287 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtrfs\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.096448 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.114022 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.347865 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.460027 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" event={"ID":"4b653a98-0379-49c3-afc5-3ab4901d742e","Type":"ContainerStarted","Data":"2b9351b1f40f480e18cb79d70ab43f161535943160d70cb161a7288dafc25c78"} Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.805720 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.807270 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.813545 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-q4blc" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.813778 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.813931 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.814938 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.822310 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.843196 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.916114 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnc4x\" (UniqueName: \"kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.916394 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.916496 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.917162 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.917193 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.917257 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.917284 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:03 crc kubenswrapper[4876]: I1215 07:11:03.917439 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022003 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022052 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022171 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022255 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnc4x\" (UniqueName: \"kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022320 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config\") pod \"openstack-galera-0\" (UID: 
\"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022358 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022407 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.022429 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.023137 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.023338 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.024664 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.028281 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.032947 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.036751 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.049530 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.052924 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnc4x\" (UniqueName: \"kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.053430 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " pod="openstack/openstack-galera-0" Dec 15 07:11:04 crc kubenswrapper[4876]: I1215 07:11:04.227162 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.362422 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.364292 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.367181 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.369629 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-zszzl" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.370182 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.371484 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.373041 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421171 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421219 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421253 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421311 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421345 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6cz5\" (UniqueName: \"kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421385 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421423 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.421470 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522235 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522281 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522628 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522674 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: 
I1215 07:11:05.522702 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522702 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522733 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522861 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522934 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.522944 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6cz5\" (UniqueName: \"kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.525002 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.525271 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.529832 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.532420 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.546927 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6cz5\" (UniqueName: \"kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.547241 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.553351 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.617564 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.618461 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.626687 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-xlvgn" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.627004 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.627289 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.630700 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.682000 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.728963 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjvhq\" (UniqueName: \"kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.729051 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.729138 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.729179 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.729218 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.830743 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.830800 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.830832 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.830949 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.830972 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjvhq\" 
(UniqueName: \"kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.831647 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.831973 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.834047 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.835397 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.850172 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjvhq\" (UniqueName: \"kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq\") pod \"memcached-0\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " pod="openstack/memcached-0" Dec 15 07:11:05 crc kubenswrapper[4876]: I1215 07:11:05.934635 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.463608 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.465213 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.467341 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-rzrv7" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.471960 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.556173 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87vhp\" (UniqueName: \"kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp\") pod \"kube-state-metrics-0\" (UID: \"2860f415-69bd-417c-a89d-00515806360b\") " pod="openstack/kube-state-metrics-0" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.657221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87vhp\" (UniqueName: \"kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp\") pod \"kube-state-metrics-0\" (UID: \"2860f415-69bd-417c-a89d-00515806360b\") " pod="openstack/kube-state-metrics-0" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.677053 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87vhp\" (UniqueName: \"kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp\") pod \"kube-state-metrics-0\" (UID: \"2860f415-69bd-417c-a89d-00515806360b\") " pod="openstack/kube-state-metrics-0" Dec 15 07:11:07 crc kubenswrapper[4876]: I1215 07:11:07.784277 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:11:08 crc kubenswrapper[4876]: I1215 07:11:08.526169 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerStarted","Data":"38358edb6ed8d461080eae3715ff7851a148aed9711f8e564697fb2e343f3648"} Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.655524 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.657793 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.683232 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.683933 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.690133 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-hsfgw" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.696989 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.726646 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.728120 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.728206 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.730942 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731127 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731223 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731773 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv942\" (UniqueName: \"kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731801 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.731838 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833656 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv942\" (UniqueName: \"kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833701 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " 
pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833726 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833760 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833819 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833850 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833891 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vwks\" (UniqueName: \"kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833913 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833930 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833949 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833968 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.833984 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.834051 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.834584 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.834599 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.834675 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.838063 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.881659 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.883597 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv942\" (UniqueName: \"kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.884122 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs\") pod \"ovn-controller-pr47l\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " pod="openstack/ovn-controller-pr47l" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935341 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log\") pod \"ovn-controller-ovs-gbp2z\" (UID: 
\"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935462 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vwks\" (UniqueName: \"kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935527 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.935552 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.936981 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.937027 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.937216 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.937223 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.938942 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:10 crc kubenswrapper[4876]: I1215 07:11:10.957544 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vwks\" (UniqueName: \"kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks\") pod \"ovn-controller-ovs-gbp2z\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.014381 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.045844 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.544479 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.549082 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.552375 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.554334 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-mvpbq" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.554493 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.554683 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.558309 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.560174 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645351 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645470 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645495 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645523 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645548 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645649 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645691 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.645717 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmzm4\" (UniqueName: \"kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747434 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747575 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747629 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747668 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747705 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs\") pod 
\"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747791 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747848 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747881 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmzm4\" (UniqueName: \"kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.747946 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.748991 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.750422 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.755885 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.756610 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.756999 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.765025 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmzm4\" (UniqueName: 
\"kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.774640 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.802851 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:11 crc kubenswrapper[4876]: I1215 07:11:11.885434 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.555828 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.559365 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.562236 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.562556 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.564037 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-qwbbh" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.571598 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.580042 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.707908 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49bdz\" (UniqueName: \"kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.707958 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.707993 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.708024 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.708085 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.708128 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.708180 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.708197 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.809995 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810045 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810075 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49bdz\" (UniqueName: \"kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810146 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810187 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc 
kubenswrapper[4876]: I1215 07:11:14.810220 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810276 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.810303 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.812733 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.813433 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.815242 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.815485 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.815858 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.821478 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.825986 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.826309 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.827877 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.827944 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.831056 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.851381 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49bdz\" (UniqueName: \"kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz\") pod \"ovsdbserver-sb-0\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.888858 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-qwbbh" Dec 15 07:11:14 crc kubenswrapper[4876]: I1215 07:11:14.897414 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.531048 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.531720 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9wncf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84bb9d8bd9-2xhbs_openstack(e825f92c-ac92-4044-a359-ec6ed204957b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.533213 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" podUID="e825f92c-ac92-4044-a359-ec6ed204957b" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.535754 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.535944 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6gsl5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-744ffd65bc-xvmw7_openstack(fdb4f16e-3e56-422b-8a47-087e3f7fb2d4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.537587 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" Dec 15 07:11:21 crc kubenswrapper[4876]: E1215 07:11:21.621469 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.447014 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.447466 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts 
--domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hd8wt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-zk5h5_openstack(b904ce6e-76b3-4402-827f-501b94d7b515): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.449459 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" podUID="b904ce6e-76b3-4402-827f-501b94d7b515" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.482286 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.482630 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9zb6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-95f5f6995-5l6xc_openstack(4b653a98-0379-49c3-afc5-3ab4901d742e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.490869 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.593316 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.673834 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wncf\" (UniqueName: \"kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf\") pod \"e825f92c-ac92-4044-a359-ec6ed204957b\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.673933 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config\") pod \"e825f92c-ac92-4044-a359-ec6ed204957b\" (UID: \"e825f92c-ac92-4044-a359-ec6ed204957b\") " Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.677662 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config" (OuterVolumeSpecName: "config") pod "e825f92c-ac92-4044-a359-ec6ed204957b" (UID: "e825f92c-ac92-4044-a359-ec6ed204957b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.688610 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.688778 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-2xhbs" event={"ID":"e825f92c-ac92-4044-a359-ec6ed204957b","Type":"ContainerDied","Data":"4c4f37d241b8cd4799bf9141d025dbb3553e5c0a63f21e5e13222bffdb4099c1"} Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.690053 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf" (OuterVolumeSpecName: "kube-api-access-9wncf") pod "e825f92c-ac92-4044-a359-ec6ed204957b" (UID: "e825f92c-ac92-4044-a359-ec6ed204957b"). InnerVolumeSpecName "kube-api-access-9wncf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:23 crc kubenswrapper[4876]: E1215 07:11:23.696879 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.775670 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e825f92c-ac92-4044-a359-ec6ed204957b-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:23 crc kubenswrapper[4876]: I1215 07:11:23.775966 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wncf\" (UniqueName: \"kubernetes.io/projected/e825f92c-ac92-4044-a359-ec6ed204957b-kube-api-access-9wncf\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.038413 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.104353 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.112267 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-2xhbs"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.286456 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.389527 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config\") pod \"b904ce6e-76b3-4402-827f-501b94d7b515\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.389670 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd8wt\" (UniqueName: \"kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt\") pod \"b904ce6e-76b3-4402-827f-501b94d7b515\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.389697 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc\") pod \"b904ce6e-76b3-4402-827f-501b94d7b515\" (UID: \"b904ce6e-76b3-4402-827f-501b94d7b515\") " Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.390002 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config" (OuterVolumeSpecName: "config") pod "b904ce6e-76b3-4402-827f-501b94d7b515" (UID: "b904ce6e-76b3-4402-827f-501b94d7b515"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.390418 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b904ce6e-76b3-4402-827f-501b94d7b515" (UID: "b904ce6e-76b3-4402-827f-501b94d7b515"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.403212 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt" (OuterVolumeSpecName: "kube-api-access-hd8wt") pod "b904ce6e-76b3-4402-827f-501b94d7b515" (UID: "b904ce6e-76b3-4402-827f-501b94d7b515"). InnerVolumeSpecName "kube-api-access-hd8wt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.424009 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.460385 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: W1215 07:11:24.466484 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2860f415_69bd_417c_a89d_00515806360b.slice/crio-218fbf7bf49be07d8362991d22a4df5766923edbf73e3d27104c3fa7937c7944 WatchSource:0}: Error finding container 218fbf7bf49be07d8362991d22a4df5766923edbf73e3d27104c3fa7937c7944: Status 404 returned error can't find the container with id 218fbf7bf49be07d8362991d22a4df5766923edbf73e3d27104c3fa7937c7944 Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.482800 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.491643 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.491753 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd8wt\" (UniqueName: \"kubernetes.io/projected/b904ce6e-76b3-4402-827f-501b94d7b515-kube-api-access-hd8wt\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.491831 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b904ce6e-76b3-4402-827f-501b94d7b515-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.649822 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.660782 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.694282 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"dd0403aa-07db-43b1-8df6-6317130cbd53","Type":"ContainerStarted","Data":"11d6ca3e7efc70b6417baa19734580193698a947f639b46c4ed43c34cf18c17d"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.696439 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2860f415-69bd-417c-a89d-00515806360b","Type":"ContainerStarted","Data":"218fbf7bf49be07d8362991d22a4df5766923edbf73e3d27104c3fa7937c7944"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.703352 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerStarted","Data":"08e1864a7cf2d74fc6e6349c0836d270dd02e0735a8de0b438415c24433a1c6b"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.724444 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.725510 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e825f92c-ac92-4044-a359-ec6ed204957b" path="/var/lib/kubelet/pods/e825f92c-ac92-4044-a359-ec6ed204957b/volumes" Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.725923 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-zk5h5" event={"ID":"b904ce6e-76b3-4402-827f-501b94d7b515","Type":"ContainerDied","Data":"9fe054e97a40a23cce49d96ce7e151bea5fc9057d04e4594c0b3f5ba9b6d2e48"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.725954 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l" event={"ID":"df0aba13-046d-4950-bfa3-c873c535847f","Type":"ContainerStarted","Data":"1595e26b4d2f02ef6954d967db7d10ff60f6e5959b4cdf59901ea1ce3b924c93"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.725964 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerStarted","Data":"7f3dd8520eb7cc36569d304f7135a2b54c44cbd7a13dbaf1858a9ca184329258"} Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.725973 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerStarted","Data":"646c1828ce0e55cffe3ebf28236178105e80559b0ade6662412e0643eb8104fe"} Dec 15 07:11:24 crc kubenswrapper[4876]: W1215 07:11:24.745156 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2f6914b_2d26_4417_a7c0_21eaf29a18bf.slice/crio-26e406bd663fd61e4936f5eddbf56ff2545cfa4800c5f7935b314d6a4a0f68ca WatchSource:0}: Error finding container 26e406bd663fd61e4936f5eddbf56ff2545cfa4800c5f7935b314d6a4a0f68ca: Status 404 returned error can't find the container with id 26e406bd663fd61e4936f5eddbf56ff2545cfa4800c5f7935b314d6a4a0f68ca Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.751097 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.857984 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:11:24 crc kubenswrapper[4876]: W1215 07:11:24.859712 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fc3485b_9f78_40d0_b864_b40626fdba7c.slice/crio-6b96d651c4b9c1327100dfb83be8a8e30f1e39e1bb7266e282afd7bbcc80ac6b WatchSource:0}: Error finding container 6b96d651c4b9c1327100dfb83be8a8e30f1e39e1bb7266e282afd7bbcc80ac6b: Status 404 returned error can't find the container with id 6b96d651c4b9c1327100dfb83be8a8e30f1e39e1bb7266e282afd7bbcc80ac6b Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.890094 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:11:24 crc kubenswrapper[4876]: I1215 07:11:24.898227 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-zk5h5"] Dec 15 07:11:25 crc kubenswrapper[4876]: I1215 07:11:25.513602 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:11:25 crc kubenswrapper[4876]: I1215 07:11:25.718945 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerStarted","Data":"6b96d651c4b9c1327100dfb83be8a8e30f1e39e1bb7266e282afd7bbcc80ac6b"} Dec 15 07:11:25 crc kubenswrapper[4876]: I1215 07:11:25.720608 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerStarted","Data":"26e406bd663fd61e4936f5eddbf56ff2545cfa4800c5f7935b314d6a4a0f68ca"} Dec 15 07:11:25 crc kubenswrapper[4876]: I1215 07:11:25.721661 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerStarted","Data":"41798fc8e0958c48ed2f437a30c506cfe792d8e998ed9822c9c11928156963fe"} Dec 15 07:11:26 crc kubenswrapper[4876]: I1215 07:11:26.716990 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b904ce6e-76b3-4402-827f-501b94d7b515" path="/var/lib/kubelet/pods/b904ce6e-76b3-4402-827f-501b94d7b515/volumes" Dec 15 07:11:28 crc kubenswrapper[4876]: I1215 07:11:28.752228 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerStarted","Data":"654aa2b251a3e8d15baaeefdef4e09e6982ddf90bc92e310071d54b23d02d13f"} Dec 15 07:11:28 crc kubenswrapper[4876]: I1215 07:11:28.755190 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerStarted","Data":"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.862357 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerStarted","Data":"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.890428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerStarted","Data":"2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.902599 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l" event={"ID":"df0aba13-046d-4950-bfa3-c873c535847f","Type":"ContainerStarted","Data":"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.903474 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-pr47l" Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.924281 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerStarted","Data":"10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.942337 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerStarted","Data":"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.943909 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-pr47l" 
podStartSLOduration=16.289749308 podStartE2EDuration="25.943888173s" podCreationTimestamp="2025-12-15 07:11:10 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.679387375 +0000 UTC m=+1210.250530286" lastFinishedPulling="2025-12-15 07:11:34.33352624 +0000 UTC m=+1219.904669151" observedRunningTime="2025-12-15 07:11:35.934373069 +0000 UTC m=+1221.505515970" watchObservedRunningTime="2025-12-15 07:11:35.943888173 +0000 UTC m=+1221.515031094" Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.962265 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"dd0403aa-07db-43b1-8df6-6317130cbd53","Type":"ContainerStarted","Data":"90ed53ef014d70db18f442d59174c6772f199e8d4213675027ef5c79f6be1513"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.963541 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.970449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2860f415-69bd-417c-a89d-00515806360b","Type":"ContainerStarted","Data":"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.971554 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.987391 4876 generic.go:334] "Generic (PLEG): container finished" podID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerID="01785db434663a22831e397d07cd68cc554909e3d864d7647440f3f76ca00722" exitCode=0 Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.987459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" event={"ID":"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4","Type":"ContainerDied","Data":"01785db434663a22831e397d07cd68cc554909e3d864d7647440f3f76ca00722"} Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.994362 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerID="fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649" exitCode=0 Dec 15 07:11:35 crc kubenswrapper[4876]: I1215 07:11:35.994403 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerDied","Data":"fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649"} Dec 15 07:11:36 crc kubenswrapper[4876]: I1215 07:11:36.005193 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=21.574836441 podStartE2EDuration="31.005177112s" podCreationTimestamp="2025-12-15 07:11:05 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.429454496 +0000 UTC m=+1210.000597407" lastFinishedPulling="2025-12-15 07:11:33.859795167 +0000 UTC m=+1219.430938078" observedRunningTime="2025-12-15 07:11:36.00364903 +0000 UTC m=+1221.574791951" watchObservedRunningTime="2025-12-15 07:11:36.005177112 +0000 UTC m=+1221.576320023" Dec 15 07:11:36 crc kubenswrapper[4876]: I1215 07:11:36.030468 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=18.686288629 podStartE2EDuration="29.030449913s" podCreationTimestamp="2025-12-15 07:11:07 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.471475521 +0000 UTC m=+1210.042618432" lastFinishedPulling="2025-12-15 07:11:34.815636805 +0000 
UTC m=+1220.386779716" observedRunningTime="2025-12-15 07:11:36.025592318 +0000 UTC m=+1221.596735229" watchObservedRunningTime="2025-12-15 07:11:36.030449913 +0000 UTC m=+1221.601592824" Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.023515 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" event={"ID":"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4","Type":"ContainerStarted","Data":"baae8b8f1ade9e1c7e7e227a5b8653658e1a84b2fd76d49bfaf80366220402dc"} Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.024693 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.028505 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerStarted","Data":"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093"} Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.028540 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerStarted","Data":"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a"} Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.057392 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" podStartSLOduration=2.948588921 podStartE2EDuration="36.057374001s" podCreationTimestamp="2025-12-15 07:11:01 +0000 UTC" firstStartedPulling="2025-12-15 07:11:02.182313496 +0000 UTC m=+1187.753456407" lastFinishedPulling="2025-12-15 07:11:35.291098576 +0000 UTC m=+1220.862241487" observedRunningTime="2025-12-15 07:11:37.056184988 +0000 UTC m=+1222.627327909" watchObservedRunningTime="2025-12-15 07:11:37.057374001 +0000 UTC m=+1222.628516932" Dec 15 07:11:37 crc kubenswrapper[4876]: I1215 07:11:37.146459 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-gbp2z" podStartSLOduration=17.742005888 podStartE2EDuration="27.146440371s" podCreationTimestamp="2025-12-15 07:11:10 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.751639998 +0000 UTC m=+1210.322782919" lastFinishedPulling="2025-12-15 07:11:34.156074491 +0000 UTC m=+1219.727217402" observedRunningTime="2025-12-15 07:11:37.14316059 +0000 UTC m=+1222.714303521" watchObservedRunningTime="2025-12-15 07:11:37.146440371 +0000 UTC m=+1222.717583282" Dec 15 07:11:38 crc kubenswrapper[4876]: I1215 07:11:38.034008 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:38 crc kubenswrapper[4876]: I1215 07:11:38.034364 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:11:39 crc kubenswrapper[4876]: I1215 07:11:39.043551 4876 generic.go:334] "Generic (PLEG): container finished" podID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerID="8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207" exitCode=0 Dec 15 07:11:39 crc kubenswrapper[4876]: I1215 07:11:39.043741 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerDied","Data":"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207"} Dec 15 07:11:39 crc kubenswrapper[4876]: I1215 07:11:39.047639 4876 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerDied","Data":"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac"} Dec 15 07:11:39 crc kubenswrapper[4876]: I1215 07:11:39.048852 4876 generic.go:334] "Generic (PLEG): container finished" podID="1503039d-0445-46ac-81ca-5af528a46ce2" containerID="4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac" exitCode=0 Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.062580 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerStarted","Data":"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787"} Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.064508 4876 generic.go:334] "Generic (PLEG): container finished" podID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerID="5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4" exitCode=0 Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.064607 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" event={"ID":"4b653a98-0379-49c3-afc5-3ab4901d742e","Type":"ContainerDied","Data":"5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4"} Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.071510 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerStarted","Data":"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a"} Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.085144 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerStarted","Data":"84311730edcfcde730c08a397de2aee0f27365e627e2d75a586ebc62a3484c83"} Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.139453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerStarted","Data":"a317e6932e2fdc31885ac1215fbae30ebcc70d8d5b074c22c0c2b75c7095ebd6"} Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.181309 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.956372274 podStartE2EDuration="36.181283113s" podCreationTimestamp="2025-12-15 07:11:04 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.485333675 +0000 UTC m=+1210.056476586" lastFinishedPulling="2025-12-15 07:11:34.710244514 +0000 UTC m=+1220.281387425" observedRunningTime="2025-12-15 07:11:40.136642285 +0000 UTC m=+1225.707785236" watchObservedRunningTime="2025-12-15 07:11:40.181283113 +0000 UTC m=+1225.752426034" Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.210015 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=16.840147576 podStartE2EDuration="30.209991259s" podCreationTimestamp="2025-12-15 07:11:10 +0000 UTC" firstStartedPulling="2025-12-15 07:11:25.522423676 +0000 UTC m=+1211.093566587" lastFinishedPulling="2025-12-15 07:11:38.892267359 +0000 UTC m=+1224.463410270" observedRunningTime="2025-12-15 07:11:40.184601646 +0000 UTC m=+1225.755744557" watchObservedRunningTime="2025-12-15 07:11:40.209991259 +0000 UTC m=+1225.781134170" Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 
07:11:40.284877 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=28.807339405 podStartE2EDuration="38.284852994s" podCreationTimestamp="2025-12-15 07:11:02 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.679671243 +0000 UTC m=+1210.250814154" lastFinishedPulling="2025-12-15 07:11:34.157184832 +0000 UTC m=+1219.728327743" observedRunningTime="2025-12-15 07:11:40.252482707 +0000 UTC m=+1225.823625628" watchObservedRunningTime="2025-12-15 07:11:40.284852994 +0000 UTC m=+1225.855995905" Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.286038 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=13.241378817 podStartE2EDuration="27.286029747s" podCreationTimestamp="2025-12-15 07:11:13 +0000 UTC" firstStartedPulling="2025-12-15 07:11:24.864791105 +0000 UTC m=+1210.435934016" lastFinishedPulling="2025-12-15 07:11:38.909442035 +0000 UTC m=+1224.480584946" observedRunningTime="2025-12-15 07:11:40.278345404 +0000 UTC m=+1225.849488315" watchObservedRunningTime="2025-12-15 07:11:40.286029747 +0000 UTC m=+1225.857172658" Dec 15 07:11:40 crc kubenswrapper[4876]: I1215 07:11:40.936793 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 15 07:11:41 crc kubenswrapper[4876]: E1215 07:11:41.046316 4876 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.70:43312->38.102.83.70:35145: write tcp 38.102.83.70:43312->38.102.83.70:35145: write: broken pipe Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.149699 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" event={"ID":"4b653a98-0379-49c3-afc5-3ab4901d742e","Type":"ContainerStarted","Data":"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60"} Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.591230 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.609760 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" podStartSLOduration=-9223371996.245045 podStartE2EDuration="40.609731944s" podCreationTimestamp="2025-12-15 07:11:01 +0000 UTC" firstStartedPulling="2025-12-15 07:11:02.475831533 +0000 UTC m=+1188.046974444" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:11:41.173021007 +0000 UTC m=+1226.744163928" watchObservedRunningTime="2025-12-15 07:11:41.609731944 +0000 UTC m=+1227.180874865" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.886407 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.886513 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.898578 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.934961 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.937033 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:41 crc kubenswrapper[4876]: I1215 07:11:41.989584 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.155382 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.190841 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.203692 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.391660 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.418084 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-794868bd45-zwwnp"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.423003 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.426449 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.434287 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-794868bd45-zwwnp"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.519952 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b7p7\" (UniqueName: \"kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.520019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.520041 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.520075 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.545995 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.547440 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.551469 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-pjp72" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.551477 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.551741 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.551766 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.554857 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.567456 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794868bd45-zwwnp"] Dec 15 07:11:42 crc kubenswrapper[4876]: E1215 07:11:42.568069 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-5b7p7 ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-794868bd45-zwwnp" podUID="89b6a52d-befa-495a-9021-c905bf32a44e" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.586166 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.587175 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.594367 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.600887 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.608929 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.611806 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.618309 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627247 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627314 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87hh9\" (UniqueName: \"kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627368 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5nh2\" (UniqueName: \"kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627410 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627461 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627476 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b7p7\" (UniqueName: \"kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627533 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627552 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627625 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627697 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627756 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.627772 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.628465 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc\") 
pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.628951 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.629005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.649193 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.666629 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b7p7\" (UniqueName: \"kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7\") pod \"dnsmasq-dns-794868bd45-zwwnp\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.728979 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729267 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87hh9\" (UniqueName: \"kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729409 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5nh2\" (UniqueName: \"kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729553 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729655 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729767 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729897 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.729992 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730115 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730385 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730489 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730590 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730777 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwrpz\" (UniqueName: \"kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730901 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " 
pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.731041 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.731166 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.731281 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.731381 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.730713 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.732507 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.731311 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.732960 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.733768 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.736269 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.736489 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.736537 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.737592 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.740796 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.750437 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87hh9\" (UniqueName: \"kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9\") pod \"ovn-controller-metrics-knn2n\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.750711 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5nh2\" (UniqueName: \"kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2\") pod \"ovn-northd-0\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.833093 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwrpz\" (UniqueName: \"kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.834916 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.835035 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: 
\"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.836080 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.836866 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.837180 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.837290 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.838612 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.838697 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.863456 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwrpz\" (UniqueName: \"kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz\") pod \"dnsmasq-dns-757dc6fff9-4dtb5\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.868055 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.906735 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:11:42 crc kubenswrapper[4876]: I1215 07:11:42.941498 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.163769 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.163875 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="dnsmasq-dns" containerID="cri-o://182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60" gracePeriod=10 Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.176826 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.243691 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc\") pod \"89b6a52d-befa-495a-9021-c905bf32a44e\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.243754 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb\") pod \"89b6a52d-befa-495a-9021-c905bf32a44e\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.243929 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5b7p7\" (UniqueName: \"kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7\") pod \"89b6a52d-befa-495a-9021-c905bf32a44e\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.243974 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config\") pod \"89b6a52d-befa-495a-9021-c905bf32a44e\" (UID: \"89b6a52d-befa-495a-9021-c905bf32a44e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.244357 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "89b6a52d-befa-495a-9021-c905bf32a44e" (UID: "89b6a52d-befa-495a-9021-c905bf32a44e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.244390 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config" (OuterVolumeSpecName: "config") pod "89b6a52d-befa-495a-9021-c905bf32a44e" (UID: "89b6a52d-befa-495a-9021-c905bf32a44e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.245639 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.245675 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.246141 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "89b6a52d-befa-495a-9021-c905bf32a44e" (UID: "89b6a52d-befa-495a-9021-c905bf32a44e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.248709 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7" (OuterVolumeSpecName: "kube-api-access-5b7p7") pod "89b6a52d-befa-495a-9021-c905bf32a44e" (UID: "89b6a52d-befa-495a-9021-c905bf32a44e"). InnerVolumeSpecName "kube-api-access-5b7p7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.347172 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5b7p7\" (UniqueName: \"kubernetes.io/projected/89b6a52d-befa-495a-9021-c905bf32a44e-kube-api-access-5b7p7\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.347218 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89b6a52d-befa-495a-9021-c905bf32a44e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.430883 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.496987 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:43 crc kubenswrapper[4876]: W1215 07:11:43.511976 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b261eca_9e17_49e0_8dfb_bbd46f21e144.slice/crio-24ddfeee2196de0959fbf8b9fe2bc2b2cd778a439df3fbe43791e4f2d8c8dbbe WatchSource:0}: Error finding container 24ddfeee2196de0959fbf8b9fe2bc2b2cd778a439df3fbe43791e4f2d8c8dbbe: Status 404 returned error can't find the container with id 24ddfeee2196de0959fbf8b9fe2bc2b2cd778a439df3fbe43791e4f2d8c8dbbe Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.544822 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:43 crc kubenswrapper[4876]: W1215 07:11:43.555867 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbe2efa0_0b59_42cb_93d7_3540f4b03613.slice/crio-3f49bdc523e736103d24c8ab8b44ce7193b48c3e5191b3da9c18fae0590c95fa WatchSource:0}: Error finding container 3f49bdc523e736103d24c8ab8b44ce7193b48c3e5191b3da9c18fae0590c95fa: Status 404 returned error can't find the container with id 3f49bdc523e736103d24c8ab8b44ce7193b48c3e5191b3da9c18fae0590c95fa Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.556037 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.660369 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config\") pod \"4b653a98-0379-49c3-afc5-3ab4901d742e\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.660469 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zb6v\" (UniqueName: \"kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v\") pod \"4b653a98-0379-49c3-afc5-3ab4901d742e\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.660604 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc\") pod \"4b653a98-0379-49c3-afc5-3ab4901d742e\" (UID: \"4b653a98-0379-49c3-afc5-3ab4901d742e\") " Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.667604 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v" (OuterVolumeSpecName: "kube-api-access-9zb6v") pod "4b653a98-0379-49c3-afc5-3ab4901d742e" (UID: "4b653a98-0379-49c3-afc5-3ab4901d742e"). InnerVolumeSpecName "kube-api-access-9zb6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.719893 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config" (OuterVolumeSpecName: "config") pod "4b653a98-0379-49c3-afc5-3ab4901d742e" (UID: "4b653a98-0379-49c3-afc5-3ab4901d742e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.728254 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b653a98-0379-49c3-afc5-3ab4901d742e" (UID: "4b653a98-0379-49c3-afc5-3ab4901d742e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.763061 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.763095 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b653a98-0379-49c3-afc5-3ab4901d742e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:43 crc kubenswrapper[4876]: I1215 07:11:43.763118 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zb6v\" (UniqueName: \"kubernetes.io/projected/4b653a98-0379-49c3-afc5-3ab4901d742e-kube-api-access-9zb6v\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.171603 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerStarted","Data":"3f49bdc523e736103d24c8ab8b44ce7193b48c3e5191b3da9c18fae0590c95fa"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.176129 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-knn2n" event={"ID":"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1","Type":"ContainerStarted","Data":"69805d5a06cd5524fafd7005c790931dcf3f61348fabbc5ef0cc66ad4bdaf487"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.176181 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-knn2n" event={"ID":"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1","Type":"ContainerStarted","Data":"b7a27214e09139d172797f5205688fad1bc9ff2fd30ee9378f1540e7203b85e8"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.179330 4876 generic.go:334] "Generic (PLEG): container finished" podID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerID="14126e8675d2e7883c21ed6d848918570dd07e3163b6a13005ce0744a4dfba12" exitCode=0 Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.179421 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" event={"ID":"5b261eca-9e17-49e0-8dfb-bbd46f21e144","Type":"ContainerDied","Data":"14126e8675d2e7883c21ed6d848918570dd07e3163b6a13005ce0744a4dfba12"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.179468 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" event={"ID":"5b261eca-9e17-49e0-8dfb-bbd46f21e144","Type":"ContainerStarted","Data":"24ddfeee2196de0959fbf8b9fe2bc2b2cd778a439df3fbe43791e4f2d8c8dbbe"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.200199 4876 generic.go:334] "Generic (PLEG): container finished" podID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerID="182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60" exitCode=0 Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.200543 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.200422 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" event={"ID":"4b653a98-0379-49c3-afc5-3ab4901d742e","Type":"ContainerDied","Data":"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.200689 4876 scope.go:117] "RemoveContainer" containerID="182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.200639 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-5l6xc" event={"ID":"4b653a98-0379-49c3-afc5-3ab4901d742e","Type":"ContainerDied","Data":"2b9351b1f40f480e18cb79d70ab43f161535943160d70cb161a7288dafc25c78"} Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.208055 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-794868bd45-zwwnp" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.210775 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-knn2n" podStartSLOduration=2.210744929 podStartE2EDuration="2.210744929s" podCreationTimestamp="2025-12-15 07:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:11:44.197522533 +0000 UTC m=+1229.768665484" watchObservedRunningTime="2025-12-15 07:11:44.210744929 +0000 UTC m=+1229.781887870" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.229764 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.234431 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.309506 4876 scope.go:117] "RemoveContainer" containerID="5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.320436 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.326772 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-5l6xc"] Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.342966 4876 scope.go:117] "RemoveContainer" containerID="182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60" Dec 15 07:11:44 crc kubenswrapper[4876]: E1215 07:11:44.344628 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60\": container with ID starting with 182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60 not found: ID does not exist" containerID="182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.344660 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60"} err="failed to get container status \"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60\": rpc error: code = NotFound desc = could not find container 
\"182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60\": container with ID starting with 182f77d3252a79c32bd00e2f47cf0ad7a179027dde6dd348d1e13416555ddf60 not found: ID does not exist" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.344680 4876 scope.go:117] "RemoveContainer" containerID="5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4" Dec 15 07:11:44 crc kubenswrapper[4876]: E1215 07:11:44.344892 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4\": container with ID starting with 5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4 not found: ID does not exist" containerID="5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.344908 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4"} err="failed to get container status \"5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4\": rpc error: code = NotFound desc = could not find container \"5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4\": container with ID starting with 5696fbe7b4d4ec2ddb36dab744fc807dfb8f3921ba9a39c302b9c8401f6ca3c4 not found: ID does not exist" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.357444 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-794868bd45-zwwnp"] Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.362218 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-794868bd45-zwwnp"] Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.720144 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" path="/var/lib/kubelet/pods/4b653a98-0379-49c3-afc5-3ab4901d742e/volumes" Dec 15 07:11:44 crc kubenswrapper[4876]: I1215 07:11:44.721234 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89b6a52d-befa-495a-9021-c905bf32a44e" path="/var/lib/kubelet/pods/89b6a52d-befa-495a-9021-c905bf32a44e/volumes" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.210756 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerStarted","Data":"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626"} Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.210824 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerStarted","Data":"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1"} Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.213749 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" event={"ID":"5b261eca-9e17-49e0-8dfb-bbd46f21e144","Type":"ContainerStarted","Data":"62dca6435911e1c04508743ce70b7776270777e54c38f943f00a5ca91d32c952"} Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.213800 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.231521 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.042808143 
podStartE2EDuration="3.231490696s" podCreationTimestamp="2025-12-15 07:11:42 +0000 UTC" firstStartedPulling="2025-12-15 07:11:43.558902829 +0000 UTC m=+1229.130045740" lastFinishedPulling="2025-12-15 07:11:44.747585382 +0000 UTC m=+1230.318728293" observedRunningTime="2025-12-15 07:11:45.229615575 +0000 UTC m=+1230.800758486" watchObservedRunningTime="2025-12-15 07:11:45.231490696 +0000 UTC m=+1230.802633607" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.248932 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" podStartSLOduration=3.24891299 podStartE2EDuration="3.24891299s" podCreationTimestamp="2025-12-15 07:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:11:45.246531433 +0000 UTC m=+1230.817674364" watchObservedRunningTime="2025-12-15 07:11:45.24891299 +0000 UTC m=+1230.820055901" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.682181 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.682297 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:45 crc kubenswrapper[4876]: I1215 07:11:45.935838 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:46 crc kubenswrapper[4876]: I1215 07:11:46.220269 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 15 07:11:46 crc kubenswrapper[4876]: I1215 07:11:46.296189 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 15 07:11:46 crc kubenswrapper[4876]: I1215 07:11:46.465610 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 15 07:11:46 crc kubenswrapper[4876]: I1215 07:11:46.551522 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.797617 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.876907 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.877208 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="dnsmasq-dns" containerID="cri-o://62dca6435911e1c04508743ce70b7776270777e54c38f943f00a5ca91d32c952" gracePeriod=10 Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.915411 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:11:47 crc kubenswrapper[4876]: E1215 07:11:47.915789 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="dnsmasq-dns" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.915810 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="dnsmasq-dns" Dec 15 07:11:47 crc kubenswrapper[4876]: E1215 07:11:47.915829 4876 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="init" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.915836 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="init" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.916006 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b653a98-0379-49c3-afc5-3ab4901d742e" containerName="dnsmasq-dns" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.916785 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:47 crc kubenswrapper[4876]: I1215 07:11:47.923348 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.052917 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.053483 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.053816 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.053864 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.053899 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbc49\" (UniqueName: \"kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.155509 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.156213 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " 
pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.156367 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.156433 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.156520 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.156910 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.157036 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbc49\" (UniqueName: \"kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.157154 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.157536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.178240 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbc49\" (UniqueName: \"kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49\") pod \"dnsmasq-dns-6cb545bd4c-2fnbt\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.235052 4876 generic.go:334] "Generic (PLEG): container finished" podID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerID="62dca6435911e1c04508743ce70b7776270777e54c38f943f00a5ca91d32c952" exitCode=0 Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.235175 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" 
event={"ID":"5b261eca-9e17-49e0-8dfb-bbd46f21e144","Type":"ContainerDied","Data":"62dca6435911e1c04508743ce70b7776270777e54c38f943f00a5ca91d32c952"} Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.260180 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.813018 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.849892 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.931820 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:11:48 crc kubenswrapper[4876]: E1215 07:11:48.932237 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="dnsmasq-dns" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.932360 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="dnsmasq-dns" Dec 15 07:11:48 crc kubenswrapper[4876]: E1215 07:11:48.932429 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="init" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.932443 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="init" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.932625 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" containerName="dnsmasq-dns" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.939517 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.941564 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.941742 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.941859 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.942337 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-2cwzs" Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.948396 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.968349 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb\") pod \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.968451 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc\") pod \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.968493 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwrpz\" (UniqueName: \"kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz\") pod \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.968553 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config\") pod \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.968614 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb\") pod \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\" (UID: \"5b261eca-9e17-49e0-8dfb-bbd46f21e144\") " Dec 15 07:11:48 crc kubenswrapper[4876]: I1215 07:11:48.972940 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz" (OuterVolumeSpecName: "kube-api-access-kwrpz") pod "5b261eca-9e17-49e0-8dfb-bbd46f21e144" (UID: "5b261eca-9e17-49e0-8dfb-bbd46f21e144"). InnerVolumeSpecName "kube-api-access-kwrpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.011421 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b261eca-9e17-49e0-8dfb-bbd46f21e144" (UID: "5b261eca-9e17-49e0-8dfb-bbd46f21e144"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.013211 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config" (OuterVolumeSpecName: "config") pod "5b261eca-9e17-49e0-8dfb-bbd46f21e144" (UID: "5b261eca-9e17-49e0-8dfb-bbd46f21e144"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.017590 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5b261eca-9e17-49e0-8dfb-bbd46f21e144" (UID: "5b261eca-9e17-49e0-8dfb-bbd46f21e144"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.020789 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5b261eca-9e17-49e0-8dfb-bbd46f21e144" (UID: "5b261eca-9e17-49e0-8dfb-bbd46f21e144"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.069851 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.069981 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070092 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070151 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq5zf\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070221 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070339 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070355 4876 
reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070364 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070373 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwrpz\" (UniqueName: \"kubernetes.io/projected/5b261eca-9e17-49e0-8dfb-bbd46f21e144-kube-api-access-kwrpz\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.070575 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b261eca-9e17-49e0-8dfb-bbd46f21e144-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.171872 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.171999 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.172035 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq5zf\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.172084 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.172151 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.172208 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:11:49.672188615 +0000 UTC m=+1235.243331596 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.172096 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.172491 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.172608 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.172694 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.173080 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.197488 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.209854 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq5zf\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.244071 4876 generic.go:334] "Generic (PLEG): container finished" podID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerID="6ca18e18f554fb532d60b436b864f1ba0e4abb0dabb3d447eca9979d0d5e76df" exitCode=0 Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.244164 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" event={"ID":"6cbe0e15-c52e-43d4-986c-ace016ab598e","Type":"ContainerDied","Data":"6ca18e18f554fb532d60b436b864f1ba0e4abb0dabb3d447eca9979d0d5e76df"} Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.244219 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" 
event={"ID":"6cbe0e15-c52e-43d4-986c-ace016ab598e","Type":"ContainerStarted","Data":"4c8fa49e413fba19144c3582c46453dbc4e1e5afa5aec6106e8b8844fcde11df"} Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.245948 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" event={"ID":"5b261eca-9e17-49e0-8dfb-bbd46f21e144","Type":"ContainerDied","Data":"24ddfeee2196de0959fbf8b9fe2bc2b2cd778a439df3fbe43791e4f2d8c8dbbe"} Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.245985 4876 scope.go:117] "RemoveContainer" containerID="62dca6435911e1c04508743ce70b7776270777e54c38f943f00a5ca91d32c952" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.246071 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757dc6fff9-4dtb5" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.326050 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.326704 4876 scope.go:117] "RemoveContainer" containerID="14126e8675d2e7883c21ed6d848918570dd07e3163b6a13005ce0744a4dfba12" Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.332358 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757dc6fff9-4dtb5"] Dec 15 07:11:49 crc kubenswrapper[4876]: I1215 07:11:49.681063 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.681244 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.681436 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:11:49 crc kubenswrapper[4876]: E1215 07:11:49.681502 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:11:50.681480947 +0000 UTC m=+1236.252623858 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:11:50 crc kubenswrapper[4876]: I1215 07:11:50.253897 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" event={"ID":"6cbe0e15-c52e-43d4-986c-ace016ab598e","Type":"ContainerStarted","Data":"c807ad69fbc79e09e32d13f8b7b357644c1f9fa31582855fed18f01492071360"} Dec 15 07:11:50 crc kubenswrapper[4876]: I1215 07:11:50.254179 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:50 crc kubenswrapper[4876]: I1215 07:11:50.277641 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" podStartSLOduration=3.277620433 podStartE2EDuration="3.277620433s" podCreationTimestamp="2025-12-15 07:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:11:50.270322086 +0000 UTC m=+1235.841465017" watchObservedRunningTime="2025-12-15 07:11:50.277620433 +0000 UTC m=+1235.848763344" Dec 15 07:11:50 crc kubenswrapper[4876]: I1215 07:11:50.696208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:50 crc kubenswrapper[4876]: E1215 07:11:50.696369 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 15 07:11:50 crc kubenswrapper[4876]: E1215 07:11:50.696384 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:11:50 crc kubenswrapper[4876]: E1215 07:11:50.696429 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:11:52.696416615 +0000 UTC m=+1238.267559526 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:11:50 crc kubenswrapper[4876]: I1215 07:11:50.714485 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b261eca-9e17-49e0-8dfb-bbd46f21e144" path="/var/lib/kubelet/pods/5b261eca-9e17-49e0-8dfb-bbd46f21e144/volumes" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.164722 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2c9d-account-create-update-nlmt8"] Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.165719 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.167879 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.184095 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2c9d-account-create-update-nlmt8"] Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.230242 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-qnfks"] Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.231204 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.251593 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qnfks"] Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.314964 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsd24\" (UniqueName: \"kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.315145 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.416514 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.416596 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcfqn\" (UniqueName: \"kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.416673 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.416766 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsd24\" (UniqueName: \"kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.417947 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.448307 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsd24\" (UniqueName: \"kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24\") pod \"glance-2c9d-account-create-update-nlmt8\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.482211 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.519025 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcfqn\" (UniqueName: \"kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.519222 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.520068 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.536462 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcfqn\" (UniqueName: \"kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn\") pod \"glance-db-create-qnfks\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.546202 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-qnfks" Dec 15 07:11:51 crc kubenswrapper[4876]: I1215 07:11:51.875481 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2c9d-account-create-update-nlmt8"] Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.102349 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-qnfks"] Dec 15 07:11:52 crc kubenswrapper[4876]: W1215 07:11:52.106479 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9acc04fe_227b_4cfa_90d2_5d12001c6706.slice/crio-ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362 WatchSource:0}: Error finding container ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362: Status 404 returned error can't find the container with id ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362 Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.276470 4876 generic.go:334] "Generic (PLEG): container finished" podID="2fd1ac31-1a68-498e-b4af-af562ad7acd3" containerID="05df66cfb1e6136f09e5899bf3239d9ae901f441c38d4c1dde084356b7683117" exitCode=0 Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.276543 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2c9d-account-create-update-nlmt8" event={"ID":"2fd1ac31-1a68-498e-b4af-af562ad7acd3","Type":"ContainerDied","Data":"05df66cfb1e6136f09e5899bf3239d9ae901f441c38d4c1dde084356b7683117"} Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.276572 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2c9d-account-create-update-nlmt8" event={"ID":"2fd1ac31-1a68-498e-b4af-af562ad7acd3","Type":"ContainerStarted","Data":"523d0f90c456013c425af6b302fd2457e9e299f245dbd17e379bb66aaeecb762"} Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.282731 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qnfks" event={"ID":"9acc04fe-227b-4cfa-90d2-5d12001c6706","Type":"ContainerStarted","Data":"80cabd3fa89c350ac8db85619ee6a53219f9642134cc11ad54167b0ebdc42f33"} Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.283036 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qnfks" event={"ID":"9acc04fe-227b-4cfa-90d2-5d12001c6706","Type":"ContainerStarted","Data":"ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362"} Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.320331 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-qnfks" podStartSLOduration=1.320306292 podStartE2EDuration="1.320306292s" podCreationTimestamp="2025-12-15 07:11:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:11:52.307560354 +0000 UTC m=+1237.878703265" watchObservedRunningTime="2025-12-15 07:11:52.320306292 +0000 UTC m=+1237.891449223" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.716028 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:52 crc kubenswrapper[4876]: E1215 07:11:52.716251 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found 
Dec 15 07:11:52 crc kubenswrapper[4876]: E1215 07:11:52.716276 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:11:52 crc kubenswrapper[4876]: E1215 07:11:52.716338 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:11:56.716319997 +0000 UTC m=+1242.287462918 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.937123 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-c6x5c"] Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.938542 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.940556 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.940617 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.940637 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.955194 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-c6x5c"] Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.976409 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-c6x5c"] Dec 15 07:11:52 crc kubenswrapper[4876]: E1215 07:11:52.976976 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-vfdgt ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-c6x5c" podUID="5af8c492-8d6e-4f55-942c-2e1132310d9d" Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.998491 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-chs2q"] Dec 15 07:11:52 crc kubenswrapper[4876]: I1215 07:11:52.999842 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.004951 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-chs2q"] Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021191 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021255 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021320 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021345 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021375 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021392 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.021462 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfdgt\" (UniqueName: \"kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.122814 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.122923 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.122975 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123010 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqmds\" (UniqueName: \"kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123076 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123207 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123276 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123467 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123391 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.123968 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124372 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124536 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfdgt\" (UniqueName: \"kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124607 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124640 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.124738 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.129339 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.130942 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.132529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: 
\"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.142817 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfdgt\" (UniqueName: \"kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt\") pod \"swift-ring-rebalance-c6x5c\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.225934 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.225985 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226023 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226064 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226090 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226136 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226175 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqmds\" (UniqueName: \"kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.226532 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift\") pod \"swift-ring-rebalance-chs2q\" (UID: 
\"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.227412 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.227912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.229899 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.230464 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.232403 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.243411 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqmds\" (UniqueName: \"kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds\") pod \"swift-ring-rebalance-chs2q\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.293986 4876 generic.go:334] "Generic (PLEG): container finished" podID="9acc04fe-227b-4cfa-90d2-5d12001c6706" containerID="80cabd3fa89c350ac8db85619ee6a53219f9642134cc11ad54167b0ebdc42f33" exitCode=0 Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.294194 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.296412 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qnfks" event={"ID":"9acc04fe-227b-4cfa-90d2-5d12001c6706","Type":"ContainerDied","Data":"80cabd3fa89c350ac8db85619ee6a53219f9642134cc11ad54167b0ebdc42f33"} Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.321751 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.328651 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.429997 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.430082 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.430179 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.430202 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.430994 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts" (OuterVolumeSpecName: "scripts") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.431078 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.431150 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.431464 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.431531 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.431627 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfdgt\" (UniqueName: \"kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt\") pod \"5af8c492-8d6e-4f55-942c-2e1132310d9d\" (UID: \"5af8c492-8d6e-4f55-942c-2e1132310d9d\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.434221 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.434245 4876 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/5af8c492-8d6e-4f55-942c-2e1132310d9d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.434256 4876 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/5af8c492-8d6e-4f55-942c-2e1132310d9d-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.437428 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.437458 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt" (OuterVolumeSpecName: "kube-api-access-vfdgt") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "kube-api-access-vfdgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.437516 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.439495 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "5af8c492-8d6e-4f55-942c-2e1132310d9d" (UID: "5af8c492-8d6e-4f55-942c-2e1132310d9d"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.537861 4876 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.537894 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfdgt\" (UniqueName: \"kubernetes.io/projected/5af8c492-8d6e-4f55-942c-2e1132310d9d-kube-api-access-vfdgt\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.537908 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.537921 4876 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/5af8c492-8d6e-4f55-942c-2e1132310d9d-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.687646 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.833583 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-chs2q"] Dec 15 07:11:53 crc kubenswrapper[4876]: W1215 07:11:53.840152 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2887b466_b7ab_45fe_9cf5_bff066201589.slice/crio-6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d WatchSource:0}: Error finding container 6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d: Status 404 returned error can't find the container with id 6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.842070 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts\") pod \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.842329 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsd24\" (UniqueName: \"kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24\") pod \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\" (UID: \"2fd1ac31-1a68-498e-b4af-af562ad7acd3\") " Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.843013 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2fd1ac31-1a68-498e-b4af-af562ad7acd3" (UID: "2fd1ac31-1a68-498e-b4af-af562ad7acd3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.843583 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fd1ac31-1a68-498e-b4af-af562ad7acd3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.848386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24" (OuterVolumeSpecName: "kube-api-access-wsd24") pod "2fd1ac31-1a68-498e-b4af-af562ad7acd3" (UID: "2fd1ac31-1a68-498e-b4af-af562ad7acd3"). InnerVolumeSpecName "kube-api-access-wsd24". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:53 crc kubenswrapper[4876]: I1215 07:11:53.945500 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsd24\" (UniqueName: \"kubernetes.io/projected/2fd1ac31-1a68-498e-b4af-af562ad7acd3-kube-api-access-wsd24\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.302728 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-chs2q" event={"ID":"2887b466-b7ab-45fe-9cf5-bff066201589","Type":"ContainerStarted","Data":"6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d"} Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.304777 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2c9d-account-create-update-nlmt8" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.307279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2c9d-account-create-update-nlmt8" event={"ID":"2fd1ac31-1a68-498e-b4af-af562ad7acd3","Type":"ContainerDied","Data":"523d0f90c456013c425af6b302fd2457e9e299f245dbd17e379bb66aaeecb762"} Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.307322 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="523d0f90c456013c425af6b302fd2457e9e299f245dbd17e379bb66aaeecb762" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.307394 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-c6x5c" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.426193 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-c6x5c"] Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.463769 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-c6x5c"] Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.724335 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5af8c492-8d6e-4f55-942c-2e1132310d9d" path="/var/lib/kubelet/pods/5af8c492-8d6e-4f55-942c-2e1132310d9d/volumes" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.810355 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-qnfks" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.964640 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcfqn\" (UniqueName: \"kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn\") pod \"9acc04fe-227b-4cfa-90d2-5d12001c6706\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.964761 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts\") pod \"9acc04fe-227b-4cfa-90d2-5d12001c6706\" (UID: \"9acc04fe-227b-4cfa-90d2-5d12001c6706\") " Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.965393 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9acc04fe-227b-4cfa-90d2-5d12001c6706" (UID: "9acc04fe-227b-4cfa-90d2-5d12001c6706"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:54 crc kubenswrapper[4876]: I1215 07:11:54.969287 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn" (OuterVolumeSpecName: "kube-api-access-wcfqn") pod "9acc04fe-227b-4cfa-90d2-5d12001c6706" (UID: "9acc04fe-227b-4cfa-90d2-5d12001c6706"). InnerVolumeSpecName "kube-api-access-wcfqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.066275 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcfqn\" (UniqueName: \"kubernetes.io/projected/9acc04fe-227b-4cfa-90d2-5d12001c6706-kube-api-access-wcfqn\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.066309 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9acc04fe-227b-4cfa-90d2-5d12001c6706-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.314941 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-qnfks" event={"ID":"9acc04fe-227b-4cfa-90d2-5d12001c6706","Type":"ContainerDied","Data":"ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362"} Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.314984 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad15cf80abbabadc498a7189154f756369880ec16bb866c5a3f6ad8311770362" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.315026 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-qnfks" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.640764 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-b6z9x"] Dec 15 07:11:55 crc kubenswrapper[4876]: E1215 07:11:55.641162 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acc04fe-227b-4cfa-90d2-5d12001c6706" containerName="mariadb-database-create" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.641178 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acc04fe-227b-4cfa-90d2-5d12001c6706" containerName="mariadb-database-create" Dec 15 07:11:55 crc kubenswrapper[4876]: E1215 07:11:55.641194 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fd1ac31-1a68-498e-b4af-af562ad7acd3" containerName="mariadb-account-create-update" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.641204 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fd1ac31-1a68-498e-b4af-af562ad7acd3" containerName="mariadb-account-create-update" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.641374 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9acc04fe-227b-4cfa-90d2-5d12001c6706" containerName="mariadb-database-create" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.641401 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fd1ac31-1a68-498e-b4af-af562ad7acd3" containerName="mariadb-account-create-update" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.641882 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.648802 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b6z9x"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.732803 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6475-account-create-update-v8dvd"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.734960 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.741971 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.744608 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6475-account-create-update-v8dvd"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.799757 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.801014 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96sz8\" (UniqueName: \"kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.874504 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-6l294"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.876569 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-6l294" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.883799 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-6l294"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.902967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96sz8\" (UniqueName: \"kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.903495 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h2jn\" (UniqueName: \"kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.903523 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.903549 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.904344 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.931306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96sz8\" (UniqueName: \"kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8\") pod \"keystone-db-create-b6z9x\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.958555 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b6z9x" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.990541 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-4aed-account-create-update-l827q"] Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.993069 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:55 crc kubenswrapper[4876]: I1215 07:11:55.998117 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.007188 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw79f\" (UniqueName: \"kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.007396 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h2jn\" (UniqueName: \"kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.007443 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.007494 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.008578 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.040883 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-9h2jn\" (UniqueName: \"kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn\") pod \"keystone-6475-account-create-update-v8dvd\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.045410 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4aed-account-create-update-l827q"] Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.066843 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.109095 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.109170 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.109191 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6pd5\" (UniqueName: \"kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.109233 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw79f\" (UniqueName: \"kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.110162 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.131305 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw79f\" (UniqueName: \"kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f\") pod \"placement-db-create-6l294\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.200672 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-6l294" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.210312 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.210349 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6pd5\" (UniqueName: \"kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.211223 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.228993 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6pd5\" (UniqueName: \"kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5\") pod \"placement-4aed-account-create-update-l827q\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.330092 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.487545 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-j6772"] Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.488747 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.497363 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-kkht2" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.499296 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.514020 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-j6772"] Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.616762 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.616879 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rf4j\" (UniqueName: \"kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.616916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.616952 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.719341 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.719422 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.719463 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rf4j\" (UniqueName: \"kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.719489 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data\") pod \"glance-db-sync-j6772\" 
(UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.719513 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: E1215 07:11:56.719720 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 15 07:11:56 crc kubenswrapper[4876]: E1215 07:11:56.719755 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:11:56 crc kubenswrapper[4876]: E1215 07:11:56.719833 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:12:04.719809445 +0000 UTC m=+1250.290952376 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.726030 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.727431 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.730616 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.745261 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rf4j\" (UniqueName: \"kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j\") pod \"glance-db-sync-j6772\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " pod="openstack/glance-db-sync-j6772" Dec 15 07:11:56 crc kubenswrapper[4876]: I1215 07:11:56.815136 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j6772" Dec 15 07:11:58 crc kubenswrapper[4876]: I1215 07:11:58.016040 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 15 07:11:58 crc kubenswrapper[4876]: I1215 07:11:58.262030 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:11:58 crc kubenswrapper[4876]: I1215 07:11:58.318128 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:11:58 crc kubenswrapper[4876]: I1215 07:11:58.318368 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="dnsmasq-dns" containerID="cri-o://baae8b8f1ade9e1c7e7e227a5b8653658e1a84b2fd76d49bfaf80366220402dc" gracePeriod=10 Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.373690 4876 generic.go:334] "Generic (PLEG): container finished" podID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerID="baae8b8f1ade9e1c7e7e227a5b8653658e1a84b2fd76d49bfaf80366220402dc" exitCode=0 Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.373865 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" event={"ID":"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4","Type":"ContainerDied","Data":"baae8b8f1ade9e1c7e7e227a5b8653658e1a84b2fd76d49bfaf80366220402dc"} Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.483170 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.586888 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config\") pod \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.587277 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc\") pod \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.587452 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gsl5\" (UniqueName: \"kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5\") pod \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\" (UID: \"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4\") " Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.592694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5" (OuterVolumeSpecName: "kube-api-access-6gsl5") pod "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" (UID: "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4"). InnerVolumeSpecName "kube-api-access-6gsl5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.626483 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4aed-account-create-update-l827q"] Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.657641 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6475-account-create-update-v8dvd"] Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.663778 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" (UID: "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.677563 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config" (OuterVolumeSpecName: "config") pod "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" (UID: "fdb4f16e-3e56-422b-8a47-087e3f7fb2d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.689224 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gsl5\" (UniqueName: \"kubernetes.io/projected/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-kube-api-access-6gsl5\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.689256 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.689268 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.873873 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-6l294"] Dec 15 07:11:59 crc kubenswrapper[4876]: I1215 07:11:59.991490 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-j6772"] Dec 15 07:12:00 crc kubenswrapper[4876]: W1215 07:12:00.001219 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfef940f6_b480_49ee_a794_da81719669f8.slice/crio-52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2 WatchSource:0}: Error finding container 52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2: Status 404 returned error can't find the container with id 52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2 Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.025266 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-b6z9x"] Dec 15 07:12:00 crc kubenswrapper[4876]: W1215 07:12:00.031405 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod462339ed_ab46_4daf_b748_0257287f53cd.slice/crio-0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a WatchSource:0}: Error finding container 0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a: Status 404 returned error can't find the container with id 
0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.381938 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-chs2q" event={"ID":"2887b466-b7ab-45fe-9cf5-bff066201589","Type":"ContainerStarted","Data":"ed46ba082042a7f7e73acc2c7c48ce907308018ef2b2dcd18542a3b290900f04"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.387064 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerID="654aa2b251a3e8d15baaeefdef4e09e6982ddf90bc92e310071d54b23d02d13f" exitCode=0 Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.387162 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerDied","Data":"654aa2b251a3e8d15baaeefdef4e09e6982ddf90bc92e310071d54b23d02d13f"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.390085 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" event={"ID":"fdb4f16e-3e56-422b-8a47-087e3f7fb2d4","Type":"ContainerDied","Data":"32090e38847bb775a1002c51bbed517b7cb063ba4136e6b7084bbc3bdf6c99cc"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.390151 4876 scope.go:117] "RemoveContainer" containerID="baae8b8f1ade9e1c7e7e227a5b8653658e1a84b2fd76d49bfaf80366220402dc" Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.390243 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-xvmw7" Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.391696 4876 generic.go:334] "Generic (PLEG): container finished" podID="15fd8b8b-f8fc-4b22-b367-0197ec641fa9" containerID="e5798be0fbda64fc3442d95a2f8d6fa9b84215a52bd5a2e3554dda263af70103" exitCode=0 Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.391778 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4aed-account-create-update-l827q" event={"ID":"15fd8b8b-f8fc-4b22-b367-0197ec641fa9","Type":"ContainerDied","Data":"e5798be0fbda64fc3442d95a2f8d6fa9b84215a52bd5a2e3554dda263af70103"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.391825 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4aed-account-create-update-l827q" event={"ID":"15fd8b8b-f8fc-4b22-b367-0197ec641fa9","Type":"ContainerStarted","Data":"492b562310d1b09866eb1289e2dd603b24801b531058dc1028f0bdaa89f3fb84"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.395759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j6772" event={"ID":"fef940f6-b480-49ee-a794-da81719669f8","Type":"ContainerStarted","Data":"52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.401880 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b6z9x" event={"ID":"462339ed-ab46-4daf-b748-0257287f53cd","Type":"ContainerStarted","Data":"c53e04d05e35f7e0da2f7a946c4159eb6b6ceba5eb6aed3cceb8823b3da1ebec"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.401947 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b6z9x" event={"ID":"462339ed-ab46-4daf-b748-0257287f53cd","Type":"ContainerStarted","Data":"0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.404682 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-chs2q" podStartSLOduration=3.136381301 podStartE2EDuration="8.40466799s" podCreationTimestamp="2025-12-15 07:11:52 +0000 UTC" firstStartedPulling="2025-12-15 07:11:53.842916111 +0000 UTC m=+1239.414059032" lastFinishedPulling="2025-12-15 07:11:59.11120281 +0000 UTC m=+1244.682345721" observedRunningTime="2025-12-15 07:12:00.40195191 +0000 UTC m=+1245.973094831" watchObservedRunningTime="2025-12-15 07:12:00.40466799 +0000 UTC m=+1245.975810911" Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.405406 4876 generic.go:334] "Generic (PLEG): container finished" podID="bfed7402-4a97-4071-86db-ba4324dbe01d" containerID="b76d51a3e29dc17517e20451f413a12e5ed7e5c3573e1a5219731b5743af46c5" exitCode=0 Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.405479 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6l294" event={"ID":"bfed7402-4a97-4071-86db-ba4324dbe01d","Type":"ContainerDied","Data":"b76d51a3e29dc17517e20451f413a12e5ed7e5c3573e1a5219731b5743af46c5"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.405505 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6l294" event={"ID":"bfed7402-4a97-4071-86db-ba4324dbe01d","Type":"ContainerStarted","Data":"43bcb2c14a32a37d4726da4d87260b6c101052cf871193687ef61b1d43829586"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.407259 4876 generic.go:334] "Generic (PLEG): container finished" podID="24739a2a-2898-4b24-a9db-3845fbda2625" containerID="f844d2a20631c7f7c30aa53e2872ca1d23bbe5a8cc378b87c0c90ee03a7ea3ef" exitCode=0 Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.407306 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6475-account-create-update-v8dvd" event={"ID":"24739a2a-2898-4b24-a9db-3845fbda2625","Type":"ContainerDied","Data":"f844d2a20631c7f7c30aa53e2872ca1d23bbe5a8cc378b87c0c90ee03a7ea3ef"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.407346 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6475-account-create-update-v8dvd" event={"ID":"24739a2a-2898-4b24-a9db-3845fbda2625","Type":"ContainerStarted","Data":"8f2be7f20c557785268eccdd4f31611669f3dc195e2945fad08a1badd3a77165"} Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.414818 4876 scope.go:117] "RemoveContainer" containerID="01785db434663a22831e397d07cd68cc554909e3d864d7647440f3f76ca00722" Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.439510 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-b6z9x" podStartSLOduration=5.439493526 podStartE2EDuration="5.439493526s" podCreationTimestamp="2025-12-15 07:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:00.436568921 +0000 UTC m=+1246.007711832" watchObservedRunningTime="2025-12-15 07:12:00.439493526 +0000 UTC m=+1246.010636447" Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.512944 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.520681 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-xvmw7"] Dec 15 07:12:00 crc kubenswrapper[4876]: I1215 07:12:00.718844 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" path="/var/lib/kubelet/pods/fdb4f16e-3e56-422b-8a47-087e3f7fb2d4/volumes" Dec 15 07:12:01 crc kubenswrapper[4876]: I1215 07:12:01.418727 4876 generic.go:334] "Generic (PLEG): container finished" podID="462339ed-ab46-4daf-b748-0257287f53cd" containerID="c53e04d05e35f7e0da2f7a946c4159eb6b6ceba5eb6aed3cceb8823b3da1ebec" exitCode=0 Dec 15 07:12:01 crc kubenswrapper[4876]: I1215 07:12:01.418802 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b6z9x" event={"ID":"462339ed-ab46-4daf-b748-0257287f53cd","Type":"ContainerDied","Data":"c53e04d05e35f7e0da2f7a946c4159eb6b6ceba5eb6aed3cceb8823b3da1ebec"} Dec 15 07:12:01 crc kubenswrapper[4876]: I1215 07:12:01.420479 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerStarted","Data":"ef11419ef47c4d37303e61eee95b4d049fd5985bb7603bfa53e9e8035128272c"} Dec 15 07:12:01 crc kubenswrapper[4876]: I1215 07:12:01.420997 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:12:01 crc kubenswrapper[4876]: I1215 07:12:01.501014 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=60.500997764 podStartE2EDuration="1m0.500997764s" podCreationTimestamp="2025-12-15 07:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:01.498724366 +0000 UTC m=+1247.069867287" watchObservedRunningTime="2025-12-15 07:12:01.500997764 +0000 UTC m=+1247.072140675" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.399007 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.403285 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.437174 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6475-account-create-update-v8dvd" event={"ID":"24739a2a-2898-4b24-a9db-3845fbda2625","Type":"ContainerDied","Data":"8f2be7f20c557785268eccdd4f31611669f3dc195e2945fad08a1badd3a77165"} Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.437221 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f2be7f20c557785268eccdd4f31611669f3dc195e2945fad08a1badd3a77165" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.437291 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6475-account-create-update-v8dvd" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.445185 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-4aed-account-create-update-l827q" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.445518 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4aed-account-create-update-l827q" event={"ID":"15fd8b8b-f8fc-4b22-b367-0197ec641fa9","Type":"ContainerDied","Data":"492b562310d1b09866eb1289e2dd603b24801b531058dc1028f0bdaa89f3fb84"} Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.445546 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="492b562310d1b09866eb1289e2dd603b24801b531058dc1028f0bdaa89f3fb84" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.527396 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-6l294" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.570339 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6pd5\" (UniqueName: \"kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5\") pod \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.570416 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts\") pod \"24739a2a-2898-4b24-a9db-3845fbda2625\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.570532 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts\") pod \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\" (UID: \"15fd8b8b-f8fc-4b22-b367-0197ec641fa9\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.570569 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h2jn\" (UniqueName: \"kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn\") pod \"24739a2a-2898-4b24-a9db-3845fbda2625\" (UID: \"24739a2a-2898-4b24-a9db-3845fbda2625\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.571527 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "15fd8b8b-f8fc-4b22-b367-0197ec641fa9" (UID: "15fd8b8b-f8fc-4b22-b367-0197ec641fa9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.572285 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.574069 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "24739a2a-2898-4b24-a9db-3845fbda2625" (UID: "24739a2a-2898-4b24-a9db-3845fbda2625"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.578283 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5" (OuterVolumeSpecName: "kube-api-access-n6pd5") pod "15fd8b8b-f8fc-4b22-b367-0197ec641fa9" (UID: "15fd8b8b-f8fc-4b22-b367-0197ec641fa9"). InnerVolumeSpecName "kube-api-access-n6pd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.578647 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn" (OuterVolumeSpecName: "kube-api-access-9h2jn") pod "24739a2a-2898-4b24-a9db-3845fbda2625" (UID: "24739a2a-2898-4b24-a9db-3845fbda2625"). InnerVolumeSpecName "kube-api-access-9h2jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.673789 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw79f\" (UniqueName: \"kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f\") pod \"bfed7402-4a97-4071-86db-ba4324dbe01d\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.673900 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts\") pod \"bfed7402-4a97-4071-86db-ba4324dbe01d\" (UID: \"bfed7402-4a97-4071-86db-ba4324dbe01d\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.674530 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h2jn\" (UniqueName: \"kubernetes.io/projected/24739a2a-2898-4b24-a9db-3845fbda2625-kube-api-access-9h2jn\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.674554 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6pd5\" (UniqueName: \"kubernetes.io/projected/15fd8b8b-f8fc-4b22-b367-0197ec641fa9-kube-api-access-n6pd5\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.674590 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24739a2a-2898-4b24-a9db-3845fbda2625-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.674733 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bfed7402-4a97-4071-86db-ba4324dbe01d" (UID: "bfed7402-4a97-4071-86db-ba4324dbe01d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.677519 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f" (OuterVolumeSpecName: "kube-api-access-bw79f") pod "bfed7402-4a97-4071-86db-ba4324dbe01d" (UID: "bfed7402-4a97-4071-86db-ba4324dbe01d"). InnerVolumeSpecName "kube-api-access-bw79f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.777606 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfed7402-4a97-4071-86db-ba4324dbe01d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.778083 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw79f\" (UniqueName: \"kubernetes.io/projected/bfed7402-4a97-4071-86db-ba4324dbe01d-kube-api-access-bw79f\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.816639 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b6z9x" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.981025 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96sz8\" (UniqueName: \"kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8\") pod \"462339ed-ab46-4daf-b748-0257287f53cd\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.981410 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts\") pod \"462339ed-ab46-4daf-b748-0257287f53cd\" (UID: \"462339ed-ab46-4daf-b748-0257287f53cd\") " Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.982467 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "462339ed-ab46-4daf-b748-0257287f53cd" (UID: "462339ed-ab46-4daf-b748-0257287f53cd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:02 crc kubenswrapper[4876]: I1215 07:12:02.986014 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8" (OuterVolumeSpecName: "kube-api-access-96sz8") pod "462339ed-ab46-4daf-b748-0257287f53cd" (UID: "462339ed-ab46-4daf-b748-0257287f53cd"). InnerVolumeSpecName "kube-api-access-96sz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.084366 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/462339ed-ab46-4daf-b748-0257287f53cd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.084424 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96sz8\" (UniqueName: \"kubernetes.io/projected/462339ed-ab46-4daf-b748-0257287f53cd-kube-api-access-96sz8\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.456821 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-b6z9x" event={"ID":"462339ed-ab46-4daf-b748-0257287f53cd","Type":"ContainerDied","Data":"0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a"} Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.457287 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cdbe1f4bb683c470c5eca728636bbbf2af64ad204d6af36aeed6d33694c568a" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.456900 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-b6z9x" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.458877 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-6l294" event={"ID":"bfed7402-4a97-4071-86db-ba4324dbe01d","Type":"ContainerDied","Data":"43bcb2c14a32a37d4726da4d87260b6c101052cf871193687ef61b1d43829586"} Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.458935 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43bcb2c14a32a37d4726da4d87260b6c101052cf871193687ef61b1d43829586" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.459002 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-6l294" Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.463194 4876 generic.go:334] "Generic (PLEG): container finished" podID="db3f4964-0cca-4527-93de-457292de4be7" containerID="8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f" exitCode=0 Dec 15 07:12:03 crc kubenswrapper[4876]: I1215 07:12:03.463248 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerDied","Data":"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f"} Dec 15 07:12:04 crc kubenswrapper[4876]: I1215 07:12:04.478672 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerStarted","Data":"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42"} Dec 15 07:12:04 crc kubenswrapper[4876]: I1215 07:12:04.480159 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 15 07:12:04 crc kubenswrapper[4876]: I1215 07:12:04.510200 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=47.766326529 podStartE2EDuration="1m3.510181994s" podCreationTimestamp="2025-12-15 07:11:01 +0000 UTC" firstStartedPulling="2025-12-15 07:11:07.771963653 +0000 UTC m=+1193.343106564" lastFinishedPulling="2025-12-15 07:11:23.515819118 +0000 UTC m=+1209.086962029" observedRunningTime="2025-12-15 07:12:04.50107733 +0000 UTC m=+1250.072220241" watchObservedRunningTime="2025-12-15 07:12:04.510181994 +0000 UTC m=+1250.081324905" Dec 15 07:12:04 crc kubenswrapper[4876]: I1215 07:12:04.821026 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:12:04 crc kubenswrapper[4876]: E1215 07:12:04.821290 4876 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 15 07:12:04 crc kubenswrapper[4876]: E1215 07:12:04.821538 4876 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 15 07:12:04 crc kubenswrapper[4876]: E1215 07:12:04.821630 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift podName:d91d3fea-2b02-48ad-b238-7a815dd36d22 nodeName:}" failed. No retries permitted until 2025-12-15 07:12:20.821608142 +0000 UTC m=+1266.392751053 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift") pod "swift-storage-0" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22") : configmap "swift-ring-files" not found Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.091867 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pr47l" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" probeResult="failure" output=< Dec 15 07:12:06 crc kubenswrapper[4876]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 15 07:12:06 crc kubenswrapper[4876]: > Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.093665 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.112281 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340408 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-pr47l-config-8ql2r"] Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340797 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="dnsmasq-dns" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340817 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="dnsmasq-dns" Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340868 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24739a2a-2898-4b24-a9db-3845fbda2625" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340878 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="24739a2a-2898-4b24-a9db-3845fbda2625" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340896 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfed7402-4a97-4071-86db-ba4324dbe01d" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340905 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfed7402-4a97-4071-86db-ba4324dbe01d" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340917 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15fd8b8b-f8fc-4b22-b367-0197ec641fa9" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340925 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="15fd8b8b-f8fc-4b22-b367-0197ec641fa9" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340944 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462339ed-ab46-4daf-b748-0257287f53cd" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340953 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="462339ed-ab46-4daf-b748-0257287f53cd" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: E1215 07:12:06.340974 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="init" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.340982 4876 
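The two etc-swift failures above are the kubelet declining to populate the projected volume for swift-storage-0 until the swift-ring-files ConfigMap exists in the openstack namespace; the retry is pushed out with a growing delay (8s at 07:11:56, 16s at 07:12:04) and finally succeeds at 07:12:20 further below, once the ring files have presumably been published by the swift-ring-rebalance-chs2q job. A minimal client-go sketch of that precondition check is included here for reference; the kubeconfig path is an assumption for illustration, while the namespace and ConfigMap name are taken from the log.

package main

import (
	"context"
	"fmt"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; adjust for the environment at hand.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/home/core/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Same namespace and name the kubelet is waiting on in the entries above.
	_, err = cs.CoreV1().ConfigMaps("openstack").Get(context.TODO(), "swift-ring-files", metav1.GetOptions{})
	switch {
	case apierrors.IsNotFound(err):
		fmt.Println("swift-ring-files not published yet; etc-swift setup for swift-storage-0 will keep backing off")
	case err != nil:
		panic(err)
	default:
		fmt.Println("swift-ring-files exists; the etc-swift projected volume can be populated")
	}
}
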
state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="init" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341220 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="24739a2a-2898-4b24-a9db-3845fbda2625" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341242 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfed7402-4a97-4071-86db-ba4324dbe01d" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341253 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="462339ed-ab46-4daf-b748-0257287f53cd" containerName="mariadb-database-create" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341270 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="15fd8b8b-f8fc-4b22-b367-0197ec641fa9" containerName="mariadb-account-create-update" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341286 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb4f16e-3e56-422b-8a47-087e3f7fb2d4" containerName="dnsmasq-dns" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.341880 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.344901 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.349746 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pr47l-config-8ql2r"] Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.475521 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.475854 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.475909 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.475930 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.475985 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.476007 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsj7z\" (UniqueName: \"kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.577939 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.578003 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.578087 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.578234 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.578449 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.578488 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsj7z\" (UniqueName: \"kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.579501 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.581087 4876 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.582292 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.582355 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.582404 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.611815 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsj7z\" (UniqueName: \"kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z\") pod \"ovn-controller-pr47l-config-8ql2r\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:06 crc kubenswrapper[4876]: I1215 07:12:06.660147 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:07 crc kubenswrapper[4876]: I1215 07:12:07.144878 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-pr47l-config-8ql2r"] Dec 15 07:12:07 crc kubenswrapper[4876]: I1215 07:12:07.500063 4876 generic.go:334] "Generic (PLEG): container finished" podID="2887b466-b7ab-45fe-9cf5-bff066201589" containerID="ed46ba082042a7f7e73acc2c7c48ce907308018ef2b2dcd18542a3b290900f04" exitCode=0 Dec 15 07:12:07 crc kubenswrapper[4876]: I1215 07:12:07.500151 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-chs2q" event={"ID":"2887b466-b7ab-45fe-9cf5-bff066201589","Type":"ContainerDied","Data":"ed46ba082042a7f7e73acc2c7c48ce907308018ef2b2dcd18542a3b290900f04"} Dec 15 07:12:07 crc kubenswrapper[4876]: I1215 07:12:07.502241 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l-config-8ql2r" event={"ID":"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8","Type":"ContainerStarted","Data":"88f6f660c0b05b56a3dcaed5b5d03aebc04cafd93b5e8e17531b3ae786379a38"} Dec 15 07:12:08 crc kubenswrapper[4876]: I1215 07:12:08.528962 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l-config-8ql2r" event={"ID":"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8","Type":"ContainerStarted","Data":"847f28629996158a7717ee6e842c76acbbe38b89059a464bbb28e36869ac3b2c"} Dec 15 07:12:08 crc kubenswrapper[4876]: I1215 07:12:08.554867 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-pr47l-config-8ql2r" podStartSLOduration=2.554848722 podStartE2EDuration="2.554848722s" podCreationTimestamp="2025-12-15 07:12:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:08.548119549 +0000 UTC m=+1254.119262470" watchObservedRunningTime="2025-12-15 07:12:08.554848722 +0000 UTC m=+1254.125991633" Dec 15 07:12:09 crc kubenswrapper[4876]: I1215 07:12:09.538022 4876 generic.go:334] "Generic (PLEG): container finished" podID="9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" containerID="847f28629996158a7717ee6e842c76acbbe38b89059a464bbb28e36869ac3b2c" exitCode=0 Dec 15 07:12:09 crc kubenswrapper[4876]: I1215 07:12:09.538080 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l-config-8ql2r" event={"ID":"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8","Type":"ContainerDied","Data":"847f28629996158a7717ee6e842c76acbbe38b89059a464bbb28e36869ac3b2c"} Dec 15 07:12:11 crc kubenswrapper[4876]: I1215 07:12:11.066136 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-pr47l" Dec 15 07:12:13 crc kubenswrapper[4876]: I1215 07:12:13.117340 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:12:18 crc kubenswrapper[4876]: E1215 07:12:18.025824 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Dec 15 07:12:18 crc kubenswrapper[4876]: E1215 07:12:18.026575 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8rf4j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-j6772_openstack(fef940f6-b480-49ee-a794-da81719669f8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:12:18 crc kubenswrapper[4876]: E1215 07:12:18.029224 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-j6772" podUID="fef940f6-b480-49ee-a794-da81719669f8" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.052792 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.058612 4876 util.go:48] "No ready sandbox for pod can be found. 
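The glance-db-sync container never starts in this window: the pull of the openstack-glance-api image is canceled at 07:12:18 ("context canceled"), the sync fails with ErrImagePull, and the follow-up sync at 07:12:18.624 below reports ImagePullBackOff. Between attempts the kubelet spaces pull retries for an image with a doubling, capped back-off; the sketch below only illustrates that shape — the 10s initial delay and 5m cap are assumed, commonly cited defaults, not values read from this log.

package main

import (
	"fmt"
	"time"
)

// backoffSchedule returns the successive delays of a doubling back-off
// capped at maxDelay, the shape image-pull retries follow in the kubelet.
func backoffSchedule(initial, maxDelay time.Duration, steps int) []time.Duration {
	delays := make([]time.Duration, 0, steps)
	d := initial
	for i := 0; i < steps; i++ {
		delays = append(delays, d)
		d *= 2
		if d > maxDelay {
			d = maxDelay
		}
	}
	return delays
}

func main() {
	// Assumed-for-illustration defaults: 10s initial delay, 5m cap.
	for i, d := range backoffSchedule(10*time.Second, 5*time.Minute, 7) {
		fmt.Printf("retry %d no earlier than %v after the previous failure\n", i+1, d)
	}
}
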
Need to start a new one" pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165149 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165214 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165230 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165275 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165299 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsj7z\" (UniqueName: \"kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165389 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165411 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165447 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165477 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165523 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: 
\"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165560 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqmds\" (UniqueName: \"kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds\") pod \"2887b466-b7ab-45fe-9cf5-bff066201589\" (UID: \"2887b466-b7ab-45fe-9cf5-bff066201589\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165598 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165621 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts\") pod \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\" (UID: \"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8\") " Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.165620 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.166009 4876 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.166155 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.166558 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run" (OuterVolumeSpecName: "var-run") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.166682 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.166941 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.167396 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.167603 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts" (OuterVolumeSpecName: "scripts") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.172363 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z" (OuterVolumeSpecName: "kube-api-access-rsj7z") pod "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" (UID: "9c9c6d5c-6d8d-484d-9dd6-e55278546fd8"). InnerVolumeSpecName "kube-api-access-rsj7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.174465 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds" (OuterVolumeSpecName: "kube-api-access-zqmds") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "kube-api-access-zqmds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.174973 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.189598 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts" (OuterVolumeSpecName: "scripts") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.195915 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.197831 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2887b466-b7ab-45fe-9cf5-bff066201589" (UID: "2887b466-b7ab-45fe-9cf5-bff066201589"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267823 4876 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267864 4876 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267887 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqmds\" (UniqueName: \"kubernetes.io/projected/2887b466-b7ab-45fe-9cf5-bff066201589-kube-api-access-zqmds\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267899 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267910 4876 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267923 4876 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2887b466-b7ab-45fe-9cf5-bff066201589-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267936 4876 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267947 4876 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267960 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsj7z\" (UniqueName: \"kubernetes.io/projected/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8-kube-api-access-rsj7z\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267971 4876 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267982 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2887b466-b7ab-45fe-9cf5-bff066201589-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.267993 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2887b466-b7ab-45fe-9cf5-bff066201589-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.618873 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l-config-8ql2r" 
event={"ID":"9c9c6d5c-6d8d-484d-9dd6-e55278546fd8","Type":"ContainerDied","Data":"88f6f660c0b05b56a3dcaed5b5d03aebc04cafd93b5e8e17531b3ae786379a38"} Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.618940 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88f6f660c0b05b56a3dcaed5b5d03aebc04cafd93b5e8e17531b3ae786379a38" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.619094 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l-config-8ql2r" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.620917 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-chs2q" event={"ID":"2887b466-b7ab-45fe-9cf5-bff066201589","Type":"ContainerDied","Data":"6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d"} Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.620948 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-chs2q" Dec 15 07:12:18 crc kubenswrapper[4876]: I1215 07:12:18.620959 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6806b9a584aae200d657cc0380b69f704774ec83ccac75349515ed531614686d" Dec 15 07:12:18 crc kubenswrapper[4876]: E1215 07:12:18.624368 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-j6772" podUID="fef940f6-b480-49ee-a794-da81719669f8" Dec 15 07:12:19 crc kubenswrapper[4876]: I1215 07:12:19.187772 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-pr47l-config-8ql2r"] Dec 15 07:12:19 crc kubenswrapper[4876]: I1215 07:12:19.203325 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-pr47l-config-8ql2r"] Dec 15 07:12:20 crc kubenswrapper[4876]: I1215 07:12:20.720389 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" path="/var/lib/kubelet/pods/9c9c6d5c-6d8d-484d-9dd6-e55278546fd8/volumes" Dec 15 07:12:20 crc kubenswrapper[4876]: I1215 07:12:20.912205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:12:20 crc kubenswrapper[4876]: I1215 07:12:20.918137 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"swift-storage-0\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " pod="openstack/swift-storage-0" Dec 15 07:12:21 crc kubenswrapper[4876]: I1215 07:12:21.076911 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 15 07:12:21 crc kubenswrapper[4876]: I1215 07:12:21.613113 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:12:21 crc kubenswrapper[4876]: I1215 07:12:21.645690 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"fde5a4e492ac9fef9a8cd82a7e603d6e2880e94f051566d04045b93a28f7d7aa"} Dec 15 07:12:22 crc kubenswrapper[4876]: I1215 07:12:22.751353 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.105078 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-bsw24"] Dec 15 07:12:23 crc kubenswrapper[4876]: E1215 07:12:23.105717 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2887b466-b7ab-45fe-9cf5-bff066201589" containerName="swift-ring-rebalance" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.105738 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2887b466-b7ab-45fe-9cf5-bff066201589" containerName="swift-ring-rebalance" Dec 15 07:12:23 crc kubenswrapper[4876]: E1215 07:12:23.105767 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" containerName="ovn-config" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.105775 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" containerName="ovn-config" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.105971 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c9c6d5c-6d8d-484d-9dd6-e55278546fd8" containerName="ovn-config" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.105992 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2887b466-b7ab-45fe-9cf5-bff066201589" containerName="swift-ring-rebalance" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.106629 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.124913 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bsw24"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.200555 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-qrp2v"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.201570 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.213216 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-qrp2v"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.229013 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-2a14-account-create-update-98zfz"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.230005 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.232180 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.238789 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2a14-account-create-update-98zfz"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.251592 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvk9v\" (UniqueName: \"kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v\") pod \"cinder-db-create-bsw24\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.251711 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts\") pod \"cinder-db-create-bsw24\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.310685 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-fc60-account-create-update-q7j8n"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.311681 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.316511 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.320034 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fc60-account-create-update-q7j8n"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.352676 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.352765 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.352841 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftgqr\" (UniqueName: \"kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.352927 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts\") pod \"cinder-db-create-bsw24\" (UID: 
\"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.353730 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts\") pod \"cinder-db-create-bsw24\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.353792 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvk9v\" (UniqueName: \"kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v\") pod \"cinder-db-create-bsw24\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.354227 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4r7v\" (UniqueName: \"kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.371157 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvk9v\" (UniqueName: \"kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v\") pod \"cinder-db-create-bsw24\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.403352 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-b45dn"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.405620 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.412437 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-b45dn"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.432821 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455203 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455258 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455316 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftgqr\" (UniqueName: \"kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455425 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x4tw\" (UniqueName: \"kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455464 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.455491 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4r7v\" (UniqueName: \"kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.456767 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.456797 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.488775 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftgqr\" (UniqueName: 
\"kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr\") pod \"barbican-2a14-account-create-update-98zfz\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.489406 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4r7v\" (UniqueName: \"kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v\") pod \"barbican-db-create-qrp2v\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.503843 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-94d2c"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.504774 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.526426 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.526955 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-v76mv" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.527219 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.527401 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.527586 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.548667 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-94d2c"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.577762 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.577852 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g64gt\" (UniqueName: \"kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.578286 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x4tw\" (UniqueName: \"kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.578346 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: 
\"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.579163 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.579381 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.585042 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a915-account-create-update-rtgq8"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.586047 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.590382 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.621617 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x4tw\" (UniqueName: \"kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw\") pod \"cinder-fc60-account-create-update-q7j8n\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.636972 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a915-account-create-update-rtgq8"] Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.643730 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.679799 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.679884 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.679915 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b2rj\" (UniqueName: \"kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.679952 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.679988 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g64gt\" (UniqueName: \"kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.681360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.692865 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9"} Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.704841 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g64gt\" (UniqueName: \"kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt\") pod \"neutron-db-create-b45dn\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.770426 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.781215 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.781278 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b2rj\" (UniqueName: \"kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.781380 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.781430 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.781467 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrwhl\" (UniqueName: \"kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.786055 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.787005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.820202 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b2rj\" (UniqueName: \"kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj\") pod \"keystone-db-sync-94d2c\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.882824 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: 
\"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.882907 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrwhl\" (UniqueName: \"kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.883692 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.905607 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.913877 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrwhl\" (UniqueName: \"kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl\") pod \"neutron-a915-account-create-update-rtgq8\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:23 crc kubenswrapper[4876]: I1215 07:12:23.930095 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.298010 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-qrp2v"] Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.412065 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-2a14-account-create-update-98zfz"] Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.427255 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bsw24"] Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.443933 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-b45dn"] Dec 15 07:12:24 crc kubenswrapper[4876]: W1215 07:12:24.465797 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb00bf5d6_e50c_41c6_ba57_b39a84d1bc6b.slice/crio-99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c WatchSource:0}: Error finding container 99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c: Status 404 returned error can't find the container with id 99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.470493 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-fc60-account-create-update-q7j8n"] Dec 15 07:12:24 crc kubenswrapper[4876]: W1215 07:12:24.479170 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9783849_3311_44bf_a6e0_126a3c1c6c9c.slice/crio-8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865 WatchSource:0}: Error finding container 8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865: Status 404 returned error can't find 
the container with id 8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865 Dec 15 07:12:24 crc kubenswrapper[4876]: W1215 07:12:24.684163 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab6f16e6_ce19_4399_806b_e3d25e47cb5f.slice/crio-b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481 WatchSource:0}: Error finding container b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481: Status 404 returned error can't find the container with id b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481 Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.687497 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a915-account-create-update-rtgq8"] Dec 15 07:12:24 crc kubenswrapper[4876]: W1215 07:12:24.698362 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7657f9d3_81bb_41d0_9c40_43f697875a5e.slice/crio-4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050 WatchSource:0}: Error finding container 4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050: Status 404 returned error can't find the container with id 4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050 Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.741131 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-94d2c"] Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.746261 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qrp2v" event={"ID":"58f80b2c-85ec-4f49-976c-6bc8510e1fdb","Type":"ContainerStarted","Data":"2cca7c7259ed425f641916acdea26339326d32b207ace7893163bedd0ef98990"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.752076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a14-account-create-update-98zfz" event={"ID":"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b","Type":"ContainerStarted","Data":"99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.755862 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-b45dn" event={"ID":"d9783849-3311-44bf-a6e0-126a3c1c6c9c","Type":"ContainerStarted","Data":"8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.761895 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.762193 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.762914 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-94d2c" event={"ID":"7657f9d3-81bb-41d0-9c40-43f697875a5e","Type":"ContainerStarted","Data":"4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.765401 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bsw24" 
event={"ID":"59ba3714-4516-4744-8efb-604800685bba","Type":"ContainerStarted","Data":"6b10c01108daba05a36e20699caf027d4c39e0c25e15aef56e42d98286567d2f"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.766476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fc60-account-create-update-q7j8n" event={"ID":"33d12d16-080f-4799-bae3-497babf7078f","Type":"ContainerStarted","Data":"5267f7b0f2b0854f0201e62c5851ee45d660016be93d9c1e1890b21364074d7a"} Dec 15 07:12:24 crc kubenswrapper[4876]: I1215 07:12:24.768580 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a915-account-create-update-rtgq8" event={"ID":"ab6f16e6-ce19-4399-806b-e3d25e47cb5f","Type":"ContainerStarted","Data":"b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.777980 4876 generic.go:334] "Generic (PLEG): container finished" podID="59ba3714-4516-4744-8efb-604800685bba" containerID="0f5d9ca0c1bc15780b7a7bc437cc9333e10e8e53a44d9af71fffcef078b3c8e9" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.778020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bsw24" event={"ID":"59ba3714-4516-4744-8efb-604800685bba","Type":"ContainerDied","Data":"0f5d9ca0c1bc15780b7a7bc437cc9333e10e8e53a44d9af71fffcef078b3c8e9"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.780200 4876 generic.go:334] "Generic (PLEG): container finished" podID="33d12d16-080f-4799-bae3-497babf7078f" containerID="facab2a461fe55e9056a81da264f9887d011082603b2cc7deb19ddec7d4a94ea" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.780259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fc60-account-create-update-q7j8n" event={"ID":"33d12d16-080f-4799-bae3-497babf7078f","Type":"ContainerDied","Data":"facab2a461fe55e9056a81da264f9887d011082603b2cc7deb19ddec7d4a94ea"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.782217 4876 generic.go:334] "Generic (PLEG): container finished" podID="ab6f16e6-ce19-4399-806b-e3d25e47cb5f" containerID="2c2e940445d6dda54c77baa30e35731e8e3964b78fb838b5e59b11f0b5cb276a" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.782262 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a915-account-create-update-rtgq8" event={"ID":"ab6f16e6-ce19-4399-806b-e3d25e47cb5f","Type":"ContainerDied","Data":"2c2e940445d6dda54c77baa30e35731e8e3964b78fb838b5e59b11f0b5cb276a"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.783396 4876 generic.go:334] "Generic (PLEG): container finished" podID="58f80b2c-85ec-4f49-976c-6bc8510e1fdb" containerID="0eb1deab71a70bcb348ef12bc6274d54e1904bd98469f82095989401793f7b45" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.783434 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qrp2v" event={"ID":"58f80b2c-85ec-4f49-976c-6bc8510e1fdb","Type":"ContainerDied","Data":"0eb1deab71a70bcb348ef12bc6274d54e1904bd98469f82095989401793f7b45"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.784398 4876 generic.go:334] "Generic (PLEG): container finished" podID="b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" containerID="7c14998c06187633b99f45ab4b9d37c286bd1a3e2dda2a643a5a6f4a0e1a4dbf" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.784432 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a14-account-create-update-98zfz" 
event={"ID":"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b","Type":"ContainerDied","Data":"7c14998c06187633b99f45ab4b9d37c286bd1a3e2dda2a643a5a6f4a0e1a4dbf"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.785416 4876 generic.go:334] "Generic (PLEG): container finished" podID="d9783849-3311-44bf-a6e0-126a3c1c6c9c" containerID="c8e4c3af47b449c9ddbf86cea9464d5a787639dab63bdd9badeca8df1d278810" exitCode=0 Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.785455 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-b45dn" event={"ID":"d9783849-3311-44bf-a6e0-126a3c1c6c9c","Type":"ContainerDied","Data":"c8e4c3af47b449c9ddbf86cea9464d5a787639dab63bdd9badeca8df1d278810"} Dec 15 07:12:25 crc kubenswrapper[4876]: I1215 07:12:25.793293 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e"} Dec 15 07:12:26 crc kubenswrapper[4876]: I1215 07:12:26.852032 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d"} Dec 15 07:12:26 crc kubenswrapper[4876]: I1215 07:12:26.852492 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9"} Dec 15 07:12:26 crc kubenswrapper[4876]: I1215 07:12:26.852506 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.323304 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.323550 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.499406 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.635688 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrwhl\" (UniqueName: \"kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl\") pod \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.635824 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts\") pod \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\" (UID: \"ab6f16e6-ce19-4399-806b-e3d25e47cb5f\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.637235 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab6f16e6-ce19-4399-806b-e3d25e47cb5f" (UID: "ab6f16e6-ce19-4399-806b-e3d25e47cb5f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.644354 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl" (OuterVolumeSpecName: "kube-api-access-xrwhl") pod "ab6f16e6-ce19-4399-806b-e3d25e47cb5f" (UID: "ab6f16e6-ce19-4399-806b-e3d25e47cb5f"). InnerVolumeSpecName "kube-api-access-xrwhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.696386 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.708169 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.719410 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.737763 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrwhl\" (UniqueName: \"kubernetes.io/projected/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-kube-api-access-xrwhl\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.738646 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab6f16e6-ce19-4399-806b-e3d25e47cb5f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839442 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftgqr\" (UniqueName: \"kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr\") pod \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts\") pod \"59ba3714-4516-4744-8efb-604800685bba\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839589 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts\") pod \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\" (UID: \"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839640 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvk9v\" (UniqueName: \"kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v\") pod \"59ba3714-4516-4744-8efb-604800685bba\" (UID: \"59ba3714-4516-4744-8efb-604800685bba\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839684 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts\") pod \"33d12d16-080f-4799-bae3-497babf7078f\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.839706 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x4tw\" (UniqueName: \"kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw\") pod \"33d12d16-080f-4799-bae3-497babf7078f\" (UID: \"33d12d16-080f-4799-bae3-497babf7078f\") " Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.840610 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" (UID: "b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.841181 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33d12d16-080f-4799-bae3-497babf7078f" (UID: "33d12d16-080f-4799-bae3-497babf7078f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.844854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59ba3714-4516-4744-8efb-604800685bba" (UID: "59ba3714-4516-4744-8efb-604800685bba"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.844961 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v" (OuterVolumeSpecName: "kube-api-access-qvk9v") pod "59ba3714-4516-4744-8efb-604800685bba" (UID: "59ba3714-4516-4744-8efb-604800685bba"). InnerVolumeSpecName "kube-api-access-qvk9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.846214 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr" (OuterVolumeSpecName: "kube-api-access-ftgqr") pod "b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" (UID: "b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b"). InnerVolumeSpecName "kube-api-access-ftgqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.846241 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw" (OuterVolumeSpecName: "kube-api-access-6x4tw") pod "33d12d16-080f-4799-bae3-497babf7078f" (UID: "33d12d16-080f-4799-bae3-497babf7078f"). InnerVolumeSpecName "kube-api-access-6x4tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.860795 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bsw24" event={"ID":"59ba3714-4516-4744-8efb-604800685bba","Type":"ContainerDied","Data":"6b10c01108daba05a36e20699caf027d4c39e0c25e15aef56e42d98286567d2f"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.860833 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b10c01108daba05a36e20699caf027d4c39e0c25e15aef56e42d98286567d2f" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.861709 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bsw24" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.862587 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-fc60-account-create-update-q7j8n" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.862614 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-fc60-account-create-update-q7j8n" event={"ID":"33d12d16-080f-4799-bae3-497babf7078f","Type":"ContainerDied","Data":"5267f7b0f2b0854f0201e62c5851ee45d660016be93d9c1e1890b21364074d7a"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.862657 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5267f7b0f2b0854f0201e62c5851ee45d660016be93d9c1e1890b21364074d7a" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.866348 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a915-account-create-update-rtgq8" event={"ID":"ab6f16e6-ce19-4399-806b-e3d25e47cb5f","Type":"ContainerDied","Data":"b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.866448 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b54c4e2df9cbc96c186c75d97608c69f37e6fe1434336c40358970f691e63481" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.866526 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a915-account-create-update-rtgq8" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.869827 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-2a14-account-create-update-98zfz" event={"ID":"b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b","Type":"ContainerDied","Data":"99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.869896 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99b0347a99f8ba3235d053f0aad63fc0fe6a25a8d766501572e6ced8a24f2f3c" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.870020 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-2a14-account-create-update-98zfz" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.881701 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646"} Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942514 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvk9v\" (UniqueName: \"kubernetes.io/projected/59ba3714-4516-4744-8efb-604800685bba-kube-api-access-qvk9v\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942544 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33d12d16-080f-4799-bae3-497babf7078f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942554 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x4tw\" (UniqueName: \"kubernetes.io/projected/33d12d16-080f-4799-bae3-497babf7078f-kube-api-access-6x4tw\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942565 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftgqr\" (UniqueName: \"kubernetes.io/projected/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-kube-api-access-ftgqr\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942574 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ba3714-4516-4744-8efb-604800685bba-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:27 crc kubenswrapper[4876]: I1215 07:12:27.942583 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.153868 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.167197 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.293578 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts\") pod \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.293636 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g64gt\" (UniqueName: \"kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt\") pod \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\" (UID: \"d9783849-3311-44bf-a6e0-126a3c1c6c9c\") " Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.293677 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4r7v\" (UniqueName: \"kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v\") pod \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.293794 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts\") pod \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\" (UID: \"58f80b2c-85ec-4f49-976c-6bc8510e1fdb\") " Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.294310 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9783849-3311-44bf-a6e0-126a3c1c6c9c" (UID: "d9783849-3311-44bf-a6e0-126a3c1c6c9c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.294661 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "58f80b2c-85ec-4f49-976c-6bc8510e1fdb" (UID: "58f80b2c-85ec-4f49-976c-6bc8510e1fdb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.298559 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v" (OuterVolumeSpecName: "kube-api-access-d4r7v") pod "58f80b2c-85ec-4f49-976c-6bc8510e1fdb" (UID: "58f80b2c-85ec-4f49-976c-6bc8510e1fdb"). InnerVolumeSpecName "kube-api-access-d4r7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.299087 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt" (OuterVolumeSpecName: "kube-api-access-g64gt") pod "d9783849-3311-44bf-a6e0-126a3c1c6c9c" (UID: "d9783849-3311-44bf-a6e0-126a3c1c6c9c"). InnerVolumeSpecName "kube-api-access-g64gt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.396235 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.396725 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9783849-3311-44bf-a6e0-126a3c1c6c9c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.396806 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g64gt\" (UniqueName: \"kubernetes.io/projected/d9783849-3311-44bf-a6e0-126a3c1c6c9c-kube-api-access-g64gt\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.396868 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4r7v\" (UniqueName: \"kubernetes.io/projected/58f80b2c-85ec-4f49-976c-6bc8510e1fdb-kube-api-access-d4r7v\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.912134 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-qrp2v" event={"ID":"58f80b2c-85ec-4f49-976c-6bc8510e1fdb","Type":"ContainerDied","Data":"2cca7c7259ed425f641916acdea26339326d32b207ace7893163bedd0ef98990"} Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.912554 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cca7c7259ed425f641916acdea26339326d32b207ace7893163bedd0ef98990" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.912173 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-qrp2v" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.913872 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-b45dn" event={"ID":"d9783849-3311-44bf-a6e0-126a3c1c6c9c","Type":"ContainerDied","Data":"8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865"} Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.913921 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e490924ac756815f5ad09d43b90fcaa105202d13481a9d125428c332f4bc865" Dec 15 07:12:30 crc kubenswrapper[4876]: I1215 07:12:30.913960 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-b45dn" Dec 15 07:12:32 crc kubenswrapper[4876]: I1215 07:12:32.796945 4876 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod15fd8b8b-f8fc-4b22-b367-0197ec641fa9"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod15fd8b8b-f8fc-4b22-b367-0197ec641fa9] : Timed out while waiting for systemd to remove kubepods-besteffort-pod15fd8b8b_f8fc_4b22_b367_0197ec641fa9.slice" Dec 15 07:12:35 crc kubenswrapper[4876]: I1215 07:12:35.000579 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-94d2c" event={"ID":"7657f9d3-81bb-41d0-9c40-43f697875a5e","Type":"ContainerStarted","Data":"1beed7fcbdaa351311ae03b42d33cb35cabddbf9032f3b4575067ede39025164"} Dec 15 07:12:35 crc kubenswrapper[4876]: I1215 07:12:35.003297 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j6772" event={"ID":"fef940f6-b480-49ee-a794-da81719669f8","Type":"ContainerStarted","Data":"49d1be0197bd7019b41f95b753ef8f25c732d0358cd8ff9b1cadcccd7de28623"} Dec 15 07:12:35 crc kubenswrapper[4876]: I1215 07:12:35.039804 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-94d2c" podStartSLOduration=2.541636848 podStartE2EDuration="12.039786814s" podCreationTimestamp="2025-12-15 07:12:23 +0000 UTC" firstStartedPulling="2025-12-15 07:12:24.709716011 +0000 UTC m=+1270.280858922" lastFinishedPulling="2025-12-15 07:12:34.207865977 +0000 UTC m=+1279.779008888" observedRunningTime="2025-12-15 07:12:35.035847903 +0000 UTC m=+1280.606990824" watchObservedRunningTime="2025-12-15 07:12:35.039786814 +0000 UTC m=+1280.610929716" Dec 15 07:12:35 crc kubenswrapper[4876]: I1215 07:12:35.076852 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-j6772" podStartSLOduration=4.8789572 podStartE2EDuration="39.076830078s" podCreationTimestamp="2025-12-15 07:11:56 +0000 UTC" firstStartedPulling="2025-12-15 07:12:00.003200474 +0000 UTC m=+1245.574343385" lastFinishedPulling="2025-12-15 07:12:34.201073352 +0000 UTC m=+1279.772216263" observedRunningTime="2025-12-15 07:12:35.066439561 +0000 UTC m=+1280.637582472" watchObservedRunningTime="2025-12-15 07:12:35.076830078 +0000 UTC m=+1280.647972989" Dec 15 07:12:36 crc kubenswrapper[4876]: I1215 07:12:36.018677 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0"} Dec 15 07:12:36 crc kubenswrapper[4876]: I1215 07:12:36.018984 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328"} Dec 15 07:12:36 crc kubenswrapper[4876]: I1215 07:12:36.018996 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b"} Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.036451 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e"} Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.036937 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d"} Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.036964 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81"} Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.036974 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerStarted","Data":"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df"} Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.085694 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.853019907 podStartE2EDuration="50.085676435s" podCreationTimestamp="2025-12-15 07:11:47 +0000 UTC" firstStartedPulling="2025-12-15 07:12:21.626184388 +0000 UTC m=+1267.197327309" lastFinishedPulling="2025-12-15 07:12:34.858840926 +0000 UTC m=+1280.429983837" observedRunningTime="2025-12-15 07:12:37.08237068 +0000 UTC m=+1282.653513591" watchObservedRunningTime="2025-12-15 07:12:37.085676435 +0000 UTC m=+1282.656819346" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.363994 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 07:12:37.364368 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9783849-3311-44bf-a6e0-126a3c1c6c9c" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364383 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9783849-3311-44bf-a6e0-126a3c1c6c9c" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 07:12:37.364394 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59ba3714-4516-4744-8efb-604800685bba" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364401 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="59ba3714-4516-4744-8efb-604800685bba" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 07:12:37.364421 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364428 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 07:12:37.364446 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f80b2c-85ec-4f49-976c-6bc8510e1fdb" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364452 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f80b2c-85ec-4f49-976c-6bc8510e1fdb" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 
07:12:37.364462 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33d12d16-080f-4799-bae3-497babf7078f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364467 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="33d12d16-080f-4799-bae3-497babf7078f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: E1215 07:12:37.364478 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab6f16e6-ce19-4399-806b-e3d25e47cb5f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364484 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab6f16e6-ce19-4399-806b-e3d25e47cb5f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364625 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="33d12d16-080f-4799-bae3-497babf7078f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364634 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364643 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="58f80b2c-85ec-4f49-976c-6bc8510e1fdb" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364652 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9783849-3311-44bf-a6e0-126a3c1c6c9c" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364662 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="59ba3714-4516-4744-8efb-604800685bba" containerName="mariadb-database-create" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.364675 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab6f16e6-ce19-4399-806b-e3d25e47cb5f" containerName="mariadb-account-create-update" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.365491 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.367208 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.376057 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.513186 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.513232 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.513416 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6d7c\" (UniqueName: \"kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.513843 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.514002 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.514080 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615544 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615626 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: 
\"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615664 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615706 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615731 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.615765 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6d7c\" (UniqueName: \"kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.616564 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.616584 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.616693 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.617171 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.617250 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc 
kubenswrapper[4876]: I1215 07:12:37.639983 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6d7c\" (UniqueName: \"kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c\") pod \"dnsmasq-dns-8467b54bcc-4gh2v\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:37 crc kubenswrapper[4876]: I1215 07:12:37.695937 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:38 crc kubenswrapper[4876]: I1215 07:12:38.123624 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:38 crc kubenswrapper[4876]: W1215 07:12:38.127918 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff39e07e_4499_48d6_9faf_0f38bd986e3e.slice/crio-fbdd48899f1b67051aa890ce4d4c48eb8fe93863e994504731e0eaed3d0801b7 WatchSource:0}: Error finding container fbdd48899f1b67051aa890ce4d4c48eb8fe93863e994504731e0eaed3d0801b7: Status 404 returned error can't find the container with id fbdd48899f1b67051aa890ce4d4c48eb8fe93863e994504731e0eaed3d0801b7 Dec 15 07:12:39 crc kubenswrapper[4876]: I1215 07:12:39.051420 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerID="38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4" exitCode=0 Dec 15 07:12:39 crc kubenswrapper[4876]: I1215 07:12:39.052274 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" event={"ID":"ff39e07e-4499-48d6-9faf-0f38bd986e3e","Type":"ContainerDied","Data":"38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4"} Dec 15 07:12:39 crc kubenswrapper[4876]: I1215 07:12:39.052309 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" event={"ID":"ff39e07e-4499-48d6-9faf-0f38bd986e3e","Type":"ContainerStarted","Data":"fbdd48899f1b67051aa890ce4d4c48eb8fe93863e994504731e0eaed3d0801b7"} Dec 15 07:12:39 crc kubenswrapper[4876]: I1215 07:12:39.055292 4876 generic.go:334] "Generic (PLEG): container finished" podID="7657f9d3-81bb-41d0-9c40-43f697875a5e" containerID="1beed7fcbdaa351311ae03b42d33cb35cabddbf9032f3b4575067ede39025164" exitCode=0 Dec 15 07:12:39 crc kubenswrapper[4876]: I1215 07:12:39.055324 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-94d2c" event={"ID":"7657f9d3-81bb-41d0-9c40-43f697875a5e","Type":"ContainerDied","Data":"1beed7fcbdaa351311ae03b42d33cb35cabddbf9032f3b4575067ede39025164"} Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.067848 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" event={"ID":"ff39e07e-4499-48d6-9faf-0f38bd986e3e","Type":"ContainerStarted","Data":"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d"} Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.068248 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.096035 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" podStartSLOduration=3.096015655 podStartE2EDuration="3.096015655s" podCreationTimestamp="2025-12-15 07:12:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:40.087041864 +0000 UTC m=+1285.658184795" watchObservedRunningTime="2025-12-15 07:12:40.096015655 +0000 UTC m=+1285.667158586" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.387849 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.471877 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle\") pod \"7657f9d3-81bb-41d0-9c40-43f697875a5e\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.472063 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b2rj\" (UniqueName: \"kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj\") pod \"7657f9d3-81bb-41d0-9c40-43f697875a5e\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.472297 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data\") pod \"7657f9d3-81bb-41d0-9c40-43f697875a5e\" (UID: \"7657f9d3-81bb-41d0-9c40-43f697875a5e\") " Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.481024 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj" (OuterVolumeSpecName: "kube-api-access-7b2rj") pod "7657f9d3-81bb-41d0-9c40-43f697875a5e" (UID: "7657f9d3-81bb-41d0-9c40-43f697875a5e"). InnerVolumeSpecName "kube-api-access-7b2rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.502411 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7657f9d3-81bb-41d0-9c40-43f697875a5e" (UID: "7657f9d3-81bb-41d0-9c40-43f697875a5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.518756 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data" (OuterVolumeSpecName: "config-data") pod "7657f9d3-81bb-41d0-9c40-43f697875a5e" (UID: "7657f9d3-81bb-41d0-9c40-43f697875a5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.574044 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.574092 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b2rj\" (UniqueName: \"kubernetes.io/projected/7657f9d3-81bb-41d0-9c40-43f697875a5e-kube-api-access-7b2rj\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:40 crc kubenswrapper[4876]: I1215 07:12:40.574135 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7657f9d3-81bb-41d0-9c40-43f697875a5e-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.080724 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-94d2c" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.080757 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-94d2c" event={"ID":"7657f9d3-81bb-41d0-9c40-43f697875a5e","Type":"ContainerDied","Data":"4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050"} Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.081196 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d89f79673256149279c4f025c3d197abb17d74e4092c7b44b3f7d85ed1f7050" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.354918 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.376814 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-57f4s"] Dec 15 07:12:41 crc kubenswrapper[4876]: E1215 07:12:41.377411 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7657f9d3-81bb-41d0-9c40-43f697875a5e" containerName="keystone-db-sync" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.377433 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7657f9d3-81bb-41d0-9c40-43f697875a5e" containerName="keystone-db-sync" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.377623 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7657f9d3-81bb-41d0-9c40-43f697875a5e" containerName="keystone-db-sync" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.378722 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.380395 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.385142 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.387392 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.387514 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.388273 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-v76mv" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.394267 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58647bbf65-wt54t"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.395679 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.404735 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-57f4s"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.443736 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58647bbf65-wt54t"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.489450 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.489506 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.489544 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.489578 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s49qj\" (UniqueName: \"kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.489628 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc 
kubenswrapper[4876]: I1215 07:12:41.489667 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.539349 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.541675 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.543237 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.544174 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.581068 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.600975 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.601036 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.601074 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.607802 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.607876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9mrs\" (UniqueName: \"kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.607940 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608002 
4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608062 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608118 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608163 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608208 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw7sl\" (UniqueName: \"kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608241 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608285 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608303 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608387 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608455 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.608512 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s49qj\" (UniqueName: \"kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.620911 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.624156 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.624831 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.632446 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.632867 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s49qj\" (UniqueName: \"kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj\") pod \"keystone-bootstrap-57f4s\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.635570 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts\") pod \"keystone-bootstrap-57f4s\" (UID: 
\"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.639871 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-mx7zg"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.655623 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.661959 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.662139 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.662223 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-c2vjp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.667719 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mx7zg"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.685129 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-nhpf5"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.686460 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.708383 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.708647 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709649 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709712 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709757 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709793 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" 
(UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709846 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709881 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9mrs\" (UniqueName: \"kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709912 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709935 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709957 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.709980 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcz5s\" (UniqueName: \"kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710003 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710024 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710040 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc 
kubenswrapper[4876]: I1215 07:12:41.710062 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710085 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw7sl\" (UniqueName: \"kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710128 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710160 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710181 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710200 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710222 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.710251 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8vw6\" (UniqueName: \"kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.713256 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jxxgm" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.715365 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc 
kubenswrapper[4876]: I1215 07:12:41.716026 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.719551 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.719870 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.720224 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.721282 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.721826 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.722659 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.724724 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.727259 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.732662 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.742696 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nhpf5"] Dec 15 07:12:41 crc 
kubenswrapper[4876]: I1215 07:12:41.759645 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.769452 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw7sl\" (UniqueName: \"kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl\") pod \"ceilometer-0\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.770177 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58647bbf65-wt54t"] Dec 15 07:12:41 crc kubenswrapper[4876]: E1215 07:12:41.770907 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-x9mrs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-58647bbf65-wt54t" podUID="ab7db757-6d75-4aca-b4c8-b11771c169f4" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.786038 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-qnzgp"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.786813 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9mrs\" (UniqueName: \"kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs\") pod \"dnsmasq-dns-58647bbf65-wt54t\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.788695 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.805674 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.805916 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.806063 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-r4vkj" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812267 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812295 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcz5s\" (UniqueName: \"kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812321 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812356 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812371 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812395 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8vw6\" (UniqueName: \"kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812423 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " 
pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812453 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812472 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812489 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812515 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812533 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6krf\" (UniqueName: \"kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.812554 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.818377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.832187 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.834311 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.834754 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.838270 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-s6h7g"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.839364 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.839606 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.840430 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.840913 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.843638 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.843874 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hhgc7" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.844598 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8vw6\" (UniqueName: \"kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6\") pod \"neutron-db-sync-nhpf5\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.848937 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.848936 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcz5s\" (UniqueName: \"kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s\") pod \"cinder-db-sync-mx7zg\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.850363 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.856667 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-s6h7g"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.864281 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-qnzgp"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.871482 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.872237 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.900931 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914516 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914543 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914585 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d4tl\" (UniqueName: \"kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914603 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914637 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914684 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvstj\" (UniqueName: \"kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914708 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: 
\"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914757 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914841 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914924 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914944 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.914964 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.915539 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6krf\" (UniqueName: \"kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.916557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.920796 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.922907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.923049 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.927542 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.933567 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6krf\" (UniqueName: \"kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf\") pod \"placement-db-sync-qnzgp\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:41 crc kubenswrapper[4876]: I1215 07:12:41.937845 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qnzgp" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.016911 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017414 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017442 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d4tl\" (UniqueName: \"kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017457 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017481 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb\") pod 
\"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017502 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvstj\" (UniqueName: \"kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017525 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017569 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.017624 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.019629 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.019975 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.021899 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.027839 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.028224 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc 
kubenswrapper[4876]: I1215 07:12:42.028261 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.036842 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d4tl\" (UniqueName: \"kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.040169 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config\") pod \"dnsmasq-dns-fd458c8cc-wwt7l\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.047780 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvstj\" (UniqueName: \"kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj\") pod \"barbican-db-sync-s6h7g\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.093971 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.094060 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="dnsmasq-dns" containerID="cri-o://d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d" gracePeriod=10 Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.109934 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.221322 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.221386 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9mrs\" (UniqueName: \"kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.221405 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.221890 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.221960 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222001 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222362 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222366 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config\") pod \"ab7db757-6d75-4aca-b4c8-b11771c169f4\" (UID: \"ab7db757-6d75-4aca-b4c8-b11771c169f4\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222851 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config" (OuterVolumeSpecName: "config") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.222866 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.223236 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.223834 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.226915 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs" (OuterVolumeSpecName: "kube-api-access-x9mrs") pod "ab7db757-6d75-4aca-b4c8-b11771c169f4" (UID: "ab7db757-6d75-4aca-b4c8-b11771c169f4"). InnerVolumeSpecName "kube-api-access-x9mrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.273160 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.298991 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.338175 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.338200 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.338213 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9mrs\" (UniqueName: \"kubernetes.io/projected/ab7db757-6d75-4aca-b4c8-b11771c169f4-kube-api-access-x9mrs\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.338227 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab7db757-6d75-4aca-b4c8-b11771c169f4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.413807 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-57f4s"] Dec 15 07:12:42 crc kubenswrapper[4876]: W1215 07:12:42.421283 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb07eeb56_7cbe_439e_b28b_4b3ec01ce17a.slice/crio-ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9 WatchSource:0}: Error finding container ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9: Status 404 returned error can't find the container with id ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9 Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.496632 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:12:42 crc kubenswrapper[4876]: W1215 07:12:42.513077 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e81fc70_c9d6_4196_8a2b_cc110e37e18e.slice/crio-d992f38d28181770a3a0d0881ab514a5b1c47f0a4c9e403ed0882c1e0935141e WatchSource:0}: Error finding container d992f38d28181770a3a0d0881ab514a5b1c47f0a4c9e403ed0882c1e0935141e: Status 404 returned error can't find the container with id d992f38d28181770a3a0d0881ab514a5b1c47f0a4c9e403ed0882c1e0935141e Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.704159 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nhpf5"] Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.748438 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-qnzgp"] Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.887834 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mx7zg"] Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.896712 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.983770 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.983822 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.983919 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.983953 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.983984 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.984042 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6d7c\" (UniqueName: \"kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c\") pod \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\" (UID: \"ff39e07e-4499-48d6-9faf-0f38bd986e3e\") " Dec 15 07:12:42 crc kubenswrapper[4876]: I1215 07:12:42.992250 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c" (OuterVolumeSpecName: "kube-api-access-f6d7c") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "kube-api-access-f6d7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.029452 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.033787 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.034432 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.049219 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.060343 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config" (OuterVolumeSpecName: "config") pod "ff39e07e-4499-48d6-9faf-0f38bd986e3e" (UID: "ff39e07e-4499-48d6-9faf-0f38bd986e3e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087070 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6d7c\" (UniqueName: \"kubernetes.io/projected/ff39e07e-4499-48d6-9faf-0f38bd986e3e-kube-api-access-f6d7c\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087148 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087168 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087188 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087200 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.087211 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ff39e07e-4499-48d6-9faf-0f38bd986e3e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.103459 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerID="d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d" exitCode=0 Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.103531 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" event={"ID":"ff39e07e-4499-48d6-9faf-0f38bd986e3e","Type":"ContainerDied","Data":"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d"} Dec 15 
07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.103569 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" event={"ID":"ff39e07e-4499-48d6-9faf-0f38bd986e3e","Type":"ContainerDied","Data":"fbdd48899f1b67051aa890ce4d4c48eb8fe93863e994504731e0eaed3d0801b7"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.103597 4876 scope.go:117] "RemoveContainer" containerID="d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.103701 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8467b54bcc-4gh2v" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.115055 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerStarted","Data":"d992f38d28181770a3a0d0881ab514a5b1c47f0a4c9e403ed0882c1e0935141e"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.117574 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qnzgp" event={"ID":"46057e1c-873e-40f1-81c2-77c99f416cc7","Type":"ContainerStarted","Data":"5ccaadbdec1b0fdc2f650718dc952ef818ca92f95d1bf3d236f04c1403320b8a"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.119051 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-57f4s" event={"ID":"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a","Type":"ContainerStarted","Data":"c8fe465ead0f1cef4b6668b9d56683285c033b94a8c0ba05de43046e72809fe2"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.119076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-57f4s" event={"ID":"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a","Type":"ContainerStarted","Data":"ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.125663 4876 generic.go:334] "Generic (PLEG): container finished" podID="fef940f6-b480-49ee-a794-da81719669f8" containerID="49d1be0197bd7019b41f95b753ef8f25c732d0358cd8ff9b1cadcccd7de28623" exitCode=0 Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.125734 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j6772" event={"ID":"fef940f6-b480-49ee-a794-da81719669f8","Type":"ContainerDied","Data":"49d1be0197bd7019b41f95b753ef8f25c732d0358cd8ff9b1cadcccd7de28623"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.126574 4876 scope.go:117] "RemoveContainer" containerID="38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.131819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nhpf5" event={"ID":"79c75431-f896-4bee-9649-3a94221ace73","Type":"ContainerStarted","Data":"5bdafabe22fc8d6baa7ff74db01d85c74e18a8cf0bcd1646fd4db652340f7a95"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.131883 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nhpf5" event={"ID":"79c75431-f896-4bee-9649-3a94221ace73","Type":"ContainerStarted","Data":"54539221888d67e1ff5cdfe4d703155454591bc2936f801420f33dae1d16cfbc"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.133743 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58647bbf65-wt54t" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.133741 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mx7zg" event={"ID":"aaaef001-e178-4e64-80b1-e86fbd15ba8e","Type":"ContainerStarted","Data":"a4000ed45f8582d997ccab1951c431791ad0c56a1ceca0393ee5366a47d4da5f"} Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.164342 4876 scope.go:117] "RemoveContainer" containerID="d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.166628 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-57f4s" podStartSLOduration=2.166605905 podStartE2EDuration="2.166605905s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:43.138921634 +0000 UTC m=+1288.710064555" watchObservedRunningTime="2025-12-15 07:12:43.166605905 +0000 UTC m=+1288.737748816" Dec 15 07:12:43 crc kubenswrapper[4876]: E1215 07:12:43.168507 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d\": container with ID starting with d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d not found: ID does not exist" containerID="d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.168562 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d"} err="failed to get container status \"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d\": rpc error: code = NotFound desc = could not find container \"d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d\": container with ID starting with d5659ce172313c44a73158ba33a8ecefe82ecffea64da6c3bc0a71aac02d127d not found: ID does not exist" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.168590 4876 scope.go:117] "RemoveContainer" containerID="38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4" Dec 15 07:12:43 crc kubenswrapper[4876]: E1215 07:12:43.170564 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4\": container with ID starting with 38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4 not found: ID does not exist" containerID="38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.170593 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4"} err="failed to get container status \"38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4\": rpc error: code = NotFound desc = could not find container \"38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4\": container with ID starting with 38eb52e10f6d25f609b5dff6300f7bb5a0547d59b8c9a736ab33424327b93df4 not found: ID does not exist" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.193982 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-db-sync-s6h7g"] Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.202688 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.211784 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8467b54bcc-4gh2v"] Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.222766 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.326151 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-nhpf5" podStartSLOduration=2.326125922 podStartE2EDuration="2.326125922s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:43.203141726 +0000 UTC m=+1288.774284647" watchObservedRunningTime="2025-12-15 07:12:43.326125922 +0000 UTC m=+1288.897268853" Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.348632 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58647bbf65-wt54t"] Dec 15 07:12:43 crc kubenswrapper[4876]: I1215 07:12:43.361265 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58647bbf65-wt54t"] Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.153076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s6h7g" event={"ID":"88bc8b14-b04f-4062-b400-f8387184810e","Type":"ContainerStarted","Data":"9a778577a755c720b733e8a5070f1039c0f4bb2f2cb2ddc9260db1e57f73d3b2"} Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.155501 4876 generic.go:334] "Generic (PLEG): container finished" podID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerID="718eb35e3e829b615770ca66a1d6fa964d06b266762fde869ac5735228948b7b" exitCode=0 Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.155593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" event={"ID":"e4c493b4-178a-4f10-b27b-e15d970c9e76","Type":"ContainerDied","Data":"718eb35e3e829b615770ca66a1d6fa964d06b266762fde869ac5735228948b7b"} Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.155619 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" event={"ID":"e4c493b4-178a-4f10-b27b-e15d970c9e76","Type":"ContainerStarted","Data":"b0dd859438b92e886e951281f3d0013bb33f8f2a44187fa69cb89c6d4cf1af1f"} Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.751930 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab7db757-6d75-4aca-b4c8-b11771c169f4" path="/var/lib/kubelet/pods/ab7db757-6d75-4aca-b4c8-b11771c169f4/volumes" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.752980 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" path="/var/lib/kubelet/pods/ff39e07e-4499-48d6-9faf-0f38bd986e3e/volumes" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.755621 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.755766 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j6772" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.847163 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data\") pod \"fef940f6-b480-49ee-a794-da81719669f8\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.847211 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data\") pod \"fef940f6-b480-49ee-a794-da81719669f8\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.847359 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle\") pod \"fef940f6-b480-49ee-a794-da81719669f8\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.847459 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rf4j\" (UniqueName: \"kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j\") pod \"fef940f6-b480-49ee-a794-da81719669f8\" (UID: \"fef940f6-b480-49ee-a794-da81719669f8\") " Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.854069 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j" (OuterVolumeSpecName: "kube-api-access-8rf4j") pod "fef940f6-b480-49ee-a794-da81719669f8" (UID: "fef940f6-b480-49ee-a794-da81719669f8"). InnerVolumeSpecName "kube-api-access-8rf4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.859269 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fef940f6-b480-49ee-a794-da81719669f8" (UID: "fef940f6-b480-49ee-a794-da81719669f8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.886316 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fef940f6-b480-49ee-a794-da81719669f8" (UID: "fef940f6-b480-49ee-a794-da81719669f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.945949 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data" (OuterVolumeSpecName: "config-data") pod "fef940f6-b480-49ee-a794-da81719669f8" (UID: "fef940f6-b480-49ee-a794-da81719669f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.951451 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rf4j\" (UniqueName: \"kubernetes.io/projected/fef940f6-b480-49ee-a794-da81719669f8-kube-api-access-8rf4j\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.951489 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.951499 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:44 crc kubenswrapper[4876]: I1215 07:12:44.951507 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fef940f6-b480-49ee-a794-da81719669f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.176710 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" event={"ID":"e4c493b4-178a-4f10-b27b-e15d970c9e76","Type":"ContainerStarted","Data":"095c8d177e5f1207d84fe243f05d0e7e6cc986679e6a27b29a2f3de885033e48"} Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.183136 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-j6772" event={"ID":"fef940f6-b480-49ee-a794-da81719669f8","Type":"ContainerDied","Data":"52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2"} Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.183180 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52d8b9a31896dfca17541ae64823b032ca91653d343ea821ba26381d94f3b9d2" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.183243 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-j6772" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.211889 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" podStartSLOduration=4.21187305 podStartE2EDuration="4.21187305s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:12:45.199721367 +0000 UTC m=+1290.770864298" watchObservedRunningTime="2025-12-15 07:12:45.21187305 +0000 UTC m=+1290.783015961" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.553670 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.645087 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:12:45 crc kubenswrapper[4876]: E1215 07:12:45.645985 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef940f6-b480-49ee-a794-da81719669f8" containerName="glance-db-sync" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.646025 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef940f6-b480-49ee-a794-da81719669f8" containerName="glance-db-sync" Dec 15 07:12:45 crc kubenswrapper[4876]: E1215 07:12:45.646061 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="dnsmasq-dns" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.646071 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="dnsmasq-dns" Dec 15 07:12:45 crc kubenswrapper[4876]: E1215 07:12:45.646133 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="init" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.646142 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="init" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.647035 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fef940f6-b480-49ee-a794-da81719669f8" containerName="glance-db-sync" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.647062 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff39e07e-4499-48d6-9faf-0f38bd986e3e" containerName="dnsmasq-dns" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.654812 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.687549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.687912 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtm4c\" (UniqueName: \"kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.687989 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.688023 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.688063 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.688169 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.703953 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.789544 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.789980 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.790086 4876 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-jtm4c\" (UniqueName: \"kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.791469 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.791586 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.791711 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.791154 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.791390 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.792844 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.793651 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.794608 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:45 crc kubenswrapper[4876]: I1215 07:12:45.818133 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtm4c\" (UniqueName: 
\"kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c\") pod \"dnsmasq-dns-5dc4fcdbc-fclsf\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.050599 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.196496 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.467414 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.469905 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.473869 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-kkht2" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.474402 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.474845 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.480334 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.514800 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.515760 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.515878 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.516017 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.516144 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " 
pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.516280 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn6kp\" (UniqueName: \"kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.516480 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618528 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618588 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618620 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618716 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618762 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn6kp\" (UniqueName: \"kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.618832 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 
07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.619665 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.620489 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.621288 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.626049 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.626172 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.626298 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.640688 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn6kp\" (UniqueName: \"kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.657768 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.762364 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.763823 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.767154 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.779013 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.800754 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.822936 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823013 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823077 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823146 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwvp9\" (UniqueName: \"kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823367 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.823422 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926063 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-qwvp9\" (UniqueName: \"kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926147 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926175 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926307 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926344 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926402 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.926439 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.928050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.928166 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.928481 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.932384 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.934579 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.946574 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.948172 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwvp9\" (UniqueName: \"kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:46 crc kubenswrapper[4876]: I1215 07:12:46.971200 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:12:47 crc kubenswrapper[4876]: I1215 07:12:47.092540 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:12:47 crc kubenswrapper[4876]: I1215 07:12:47.203564 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="dnsmasq-dns" containerID="cri-o://095c8d177e5f1207d84fe243f05d0e7e6cc986679e6a27b29a2f3de885033e48" gracePeriod=10 Dec 15 07:12:48 crc kubenswrapper[4876]: I1215 07:12:48.244078 4876 generic.go:334] "Generic (PLEG): container finished" podID="b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" containerID="c8fe465ead0f1cef4b6668b9d56683285c033b94a8c0ba05de43046e72809fe2" exitCode=0 Dec 15 07:12:48 crc kubenswrapper[4876]: I1215 07:12:48.244513 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-57f4s" event={"ID":"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a","Type":"ContainerDied","Data":"c8fe465ead0f1cef4b6668b9d56683285c033b94a8c0ba05de43046e72809fe2"} Dec 15 07:12:48 crc kubenswrapper[4876]: I1215 07:12:48.293898 4876 generic.go:334] "Generic (PLEG): container finished" podID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerID="095c8d177e5f1207d84fe243f05d0e7e6cc986679e6a27b29a2f3de885033e48" exitCode=0 Dec 15 07:12:48 crc kubenswrapper[4876]: I1215 07:12:48.293942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" event={"ID":"e4c493b4-178a-4f10-b27b-e15d970c9e76","Type":"ContainerDied","Data":"095c8d177e5f1207d84fe243f05d0e7e6cc986679e6a27b29a2f3de885033e48"} Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.650408 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.704638 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.704745 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.704818 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d4tl\" (UniqueName: \"kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.704873 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.704909 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: 
I1215 07:12:50.705004 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb\") pod \"e4c493b4-178a-4f10-b27b-e15d970c9e76\" (UID: \"e4c493b4-178a-4f10-b27b-e15d970c9e76\") " Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.723510 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl" (OuterVolumeSpecName: "kube-api-access-7d4tl") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "kube-api-access-7d4tl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.758663 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.773173 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.776313 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.782898 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.806986 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.807022 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.807033 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.807043 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.807051 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d4tl\" (UniqueName: \"kubernetes.io/projected/e4c493b4-178a-4f10-b27b-e15d970c9e76-kube-api-access-7d4tl\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.822978 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config" (OuterVolumeSpecName: "config") pod "e4c493b4-178a-4f10-b27b-e15d970c9e76" (UID: "e4c493b4-178a-4f10-b27b-e15d970c9e76"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:12:50 crc kubenswrapper[4876]: I1215 07:12:50.908261 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4c493b4-178a-4f10-b27b-e15d970c9e76-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:51 crc kubenswrapper[4876]: I1215 07:12:51.320298 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" event={"ID":"e4c493b4-178a-4f10-b27b-e15d970c9e76","Type":"ContainerDied","Data":"b0dd859438b92e886e951281f3d0013bb33f8f2a44187fa69cb89c6d4cf1af1f"} Dec 15 07:12:51 crc kubenswrapper[4876]: I1215 07:12:51.320348 4876 scope.go:117] "RemoveContainer" containerID="095c8d177e5f1207d84fe243f05d0e7e6cc986679e6a27b29a2f3de885033e48" Dec 15 07:12:51 crc kubenswrapper[4876]: I1215 07:12:51.320453 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fd458c8cc-wwt7l" Dec 15 07:12:51 crc kubenswrapper[4876]: I1215 07:12:51.361976 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:51 crc kubenswrapper[4876]: I1215 07:12:51.369839 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fd458c8cc-wwt7l"] Dec 15 07:12:52 crc kubenswrapper[4876]: I1215 07:12:52.486664 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:12:52 crc kubenswrapper[4876]: I1215 07:12:52.548670 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:12:52 crc kubenswrapper[4876]: I1215 07:12:52.743878 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" path="/var/lib/kubelet/pods/e4c493b4-178a-4f10-b27b-e15d970c9e76/volumes" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.478732 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.668611 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.668882 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.668916 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.668952 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s49qj\" (UniqueName: \"kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.669002 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.669052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts\") pod \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\" (UID: \"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a\") " Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.688240 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod 
"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.688286 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.688292 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj" (OuterVolumeSpecName: "kube-api-access-s49qj") pod "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "kube-api-access-s49qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.688399 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts" (OuterVolumeSpecName: "scripts") pod "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.696304 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.696430 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data" (OuterVolumeSpecName: "config-data") pod "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" (UID: "b07eeb56-7cbe-439e-b28b-4b3ec01ce17a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.770982 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.771022 4876 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.771036 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s49qj\" (UniqueName: \"kubernetes.io/projected/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-kube-api-access-s49qj\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.771049 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.771060 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:53 crc kubenswrapper[4876]: I1215 07:12:53.771071 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.349908 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-57f4s" event={"ID":"b07eeb56-7cbe-439e-b28b-4b3ec01ce17a","Type":"ContainerDied","Data":"ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9"} Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.349946 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed5798911a2476639fe914933cfd424d22bdd5a43649e3164f52a19f1b1ebee9" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.350025 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-57f4s" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.665120 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-57f4s"] Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.672551 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-57f4s"] Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.717725 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" path="/var/lib/kubelet/pods/b07eeb56-7cbe-439e-b28b-4b3ec01ce17a/volumes" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.763855 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-8jtjq"] Dec 15 07:12:54 crc kubenswrapper[4876]: E1215 07:12:54.764313 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="dnsmasq-dns" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.764445 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="dnsmasq-dns" Dec 15 07:12:54 crc kubenswrapper[4876]: E1215 07:12:54.764467 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" containerName="keystone-bootstrap" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.764476 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" containerName="keystone-bootstrap" Dec 15 07:12:54 crc kubenswrapper[4876]: E1215 07:12:54.764485 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="init" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.764493 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="init" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.764715 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4c493b4-178a-4f10-b27b-e15d970c9e76" containerName="dnsmasq-dns" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.764734 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b07eeb56-7cbe-439e-b28b-4b3ec01ce17a" containerName="keystone-bootstrap" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.765390 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.767488 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.767867 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.767884 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-v76mv" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.767894 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.770735 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.796738 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8jtjq"] Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.887799 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.887855 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.887948 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.887997 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.888099 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.888193 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr6g7\" (UniqueName: \"kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.989814 4876 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.990183 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.990245 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.990275 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.990322 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.990355 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr6g7\" (UniqueName: \"kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.995248 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.995405 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:54 crc kubenswrapper[4876]: I1215 07:12:54.996050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:55 crc kubenswrapper[4876]: I1215 07:12:55.010630 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle\") pod \"keystone-bootstrap-8jtjq\" (UID: 
\"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:55 crc kubenswrapper[4876]: I1215 07:12:55.016955 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr6g7\" (UniqueName: \"kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:55 crc kubenswrapper[4876]: I1215 07:12:55.023251 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data\") pod \"keystone-bootstrap-8jtjq\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:55 crc kubenswrapper[4876]: I1215 07:12:55.089038 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:12:57 crc kubenswrapper[4876]: I1215 07:12:57.322645 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:12:57 crc kubenswrapper[4876]: I1215 07:12:57.322984 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:13:01 crc kubenswrapper[4876]: I1215 07:13:01.414950 4876 generic.go:334] "Generic (PLEG): container finished" podID="79c75431-f896-4bee-9649-3a94221ace73" containerID="5bdafabe22fc8d6baa7ff74db01d85c74e18a8cf0bcd1646fd4db652340f7a95" exitCode=0 Dec 15 07:13:01 crc kubenswrapper[4876]: I1215 07:13:01.415030 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nhpf5" event={"ID":"79c75431-f896-4bee-9649-3a94221ace73","Type":"ContainerDied","Data":"5bdafabe22fc8d6baa7ff74db01d85c74e18a8cf0bcd1646fd4db652340f7a95"} Dec 15 07:13:02 crc kubenswrapper[4876]: E1215 07:13:02.617587 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16" Dec 15 07:13:02 crc kubenswrapper[4876]: E1215 07:13:02.617845 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zvstj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-s6h7g_openstack(88bc8b14-b04f-4062-b400-f8387184810e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:13:02 crc kubenswrapper[4876]: E1215 07:13:02.619277 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-s6h7g" podUID="88bc8b14-b04f-4062-b400-f8387184810e" Dec 15 07:13:02 crc kubenswrapper[4876]: I1215 07:13:02.625189 4876 scope.go:117] "RemoveContainer" containerID="718eb35e3e829b615770ca66a1d6fa964d06b266762fde869ac5735228948b7b" Dec 15 07:13:03 crc kubenswrapper[4876]: I1215 07:13:03.089866 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:03 crc kubenswrapper[4876]: E1215 07:13:03.435373 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16\\\"\"" pod="openstack/barbican-db-sync-s6h7g" podUID="88bc8b14-b04f-4062-b400-f8387184810e" Dec 15 07:13:03 crc kubenswrapper[4876]: E1215 07:13:03.814119 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Dec 15 07:13:03 crc kubenswrapper[4876]: E1215 07:13:03.814585 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pcz5s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-mx7zg_openstack(aaaef001-e178-4e64-80b1-e86fbd15ba8e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 07:13:03 crc kubenswrapper[4876]: E1215 07:13:03.816630 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-mx7zg" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" Dec 15 07:13:03 crc kubenswrapper[4876]: I1215 07:13:03.924560 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.052475 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle\") pod \"79c75431-f896-4bee-9649-3a94221ace73\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.053052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8vw6\" (UniqueName: \"kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6\") pod \"79c75431-f896-4bee-9649-3a94221ace73\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.053081 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config\") pod \"79c75431-f896-4bee-9649-3a94221ace73\" (UID: \"79c75431-f896-4bee-9649-3a94221ace73\") " Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.057499 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6" (OuterVolumeSpecName: "kube-api-access-p8vw6") pod "79c75431-f896-4bee-9649-3a94221ace73" (UID: "79c75431-f896-4bee-9649-3a94221ace73"). InnerVolumeSpecName "kube-api-access-p8vw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.081180 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79c75431-f896-4bee-9649-3a94221ace73" (UID: "79c75431-f896-4bee-9649-3a94221ace73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.085051 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config" (OuterVolumeSpecName: "config") pod "79c75431-f896-4bee-9649-3a94221ace73" (UID: "79c75431-f896-4bee-9649-3a94221ace73"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.155432 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8vw6\" (UniqueName: \"kubernetes.io/projected/79c75431-f896-4bee-9649-3a94221ace73-kube-api-access-p8vw6\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.155467 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.155478 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79c75431-f896-4bee-9649-3a94221ace73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.187200 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:13:04 crc kubenswrapper[4876]: W1215 07:13:04.198515 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ed3ae48_fe00_4892_9117_12b25c129b84.slice/crio-e0522473553f44fcd2f0f99ae7401e86efec1c29ba6e47a29ca051246f7c32d1 WatchSource:0}: Error finding container e0522473553f44fcd2f0f99ae7401e86efec1c29ba6e47a29ca051246f7c32d1: Status 404 returned error can't find the container with id e0522473553f44fcd2f0f99ae7401e86efec1c29ba6e47a29ca051246f7c32d1 Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.293358 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-8jtjq"] Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.378792 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.444969 4876 generic.go:334] "Generic (PLEG): container finished" podID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerID="8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d" exitCode=0 Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.445035 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" event={"ID":"7ed3ae48-fe00-4892-9117-12b25c129b84","Type":"ContainerDied","Data":"8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.445064 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" event={"ID":"7ed3ae48-fe00-4892-9117-12b25c129b84","Type":"ContainerStarted","Data":"e0522473553f44fcd2f0f99ae7401e86efec1c29ba6e47a29ca051246f7c32d1"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.448253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nhpf5" event={"ID":"79c75431-f896-4bee-9649-3a94221ace73","Type":"ContainerDied","Data":"54539221888d67e1ff5cdfe4d703155454591bc2936f801420f33dae1d16cfbc"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.448285 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nhpf5" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.448290 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54539221888d67e1ff5cdfe4d703155454591bc2936f801420f33dae1d16cfbc" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.454473 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8jtjq" event={"ID":"fb7788fd-6d1c-4251-9629-2f550c34b522","Type":"ContainerStarted","Data":"8045507e8bd0fe48cbd27a3f3b717fe7afc1bc3d2d0105c99ca01070497fecf6"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.459123 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerStarted","Data":"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.464806 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerStarted","Data":"5a024ee269cb070b5b4726257c006dd4f8b0845af671b04d95beebe8b522f5f0"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.469765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qnzgp" event={"ID":"46057e1c-873e-40f1-81c2-77c99f416cc7","Type":"ContainerStarted","Data":"8b931409e9892662e3816fe38ace9a8974bc0af1f3224b7c5f8239fed95708bc"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.473655 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerStarted","Data":"81a29b7d93f765a228a3781923863fbde95731184036d5ed64331e57ee7c3f27"} Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.473696 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerStarted","Data":"20badb3dcc91547103c138f1d65bf313b4a43755f4f9e15fe6e6f50e97214479"} Dec 15 07:13:04 crc kubenswrapper[4876]: E1215 07:13:04.475484 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-mx7zg" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" Dec 15 07:13:04 crc kubenswrapper[4876]: I1215 07:13:04.491441 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-qnzgp" podStartSLOduration=3.60796329 podStartE2EDuration="23.491425136s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="2025-12-15 07:12:42.730364905 +0000 UTC m=+1288.301507826" lastFinishedPulling="2025-12-15 07:13:02.613826761 +0000 UTC m=+1308.184969672" observedRunningTime="2025-12-15 07:13:04.486381344 +0000 UTC m=+1310.057524275" watchObservedRunningTime="2025-12-15 07:13:04.491425136 +0000 UTC m=+1310.062568047" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.248269 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.363175 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:05 
crc kubenswrapper[4876]: E1215 07:13:05.363886 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79c75431-f896-4bee-9649-3a94221ace73" containerName="neutron-db-sync" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.363906 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="79c75431-f896-4bee-9649-3a94221ace73" containerName="neutron-db-sync" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.364164 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="79c75431-f896-4bee-9649-3a94221ace73" containerName="neutron-db-sync" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.365260 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.375029 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.433147 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.434472 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.438688 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.438875 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.438975 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-jxxgm" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.447488 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.451853 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.508542 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.508657 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.508854 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lflh7\" (UniqueName: \"kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.508998 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.509178 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.509239 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.603245 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" event={"ID":"7ed3ae48-fe00-4892-9117-12b25c129b84","Type":"ContainerStarted","Data":"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c"} Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.604531 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.608033 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8jtjq" event={"ID":"fb7788fd-6d1c-4251-9629-2f550c34b522","Type":"ContainerStarted","Data":"a0a8d5ff29d8c7fb66681edfebb386b1c81d12a16bcc7db3d27a1bac7237e1fd"} Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.610318 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerStarted","Data":"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655"} Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612529 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612580 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612628 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612662 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612702 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612725 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612771 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57kwg\" (UniqueName: \"kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.612889 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.613124 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.613165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lflh7\" (UniqueName: \"kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.613205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.614193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.614867 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.615970 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.616033 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.617762 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.624976 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-log" containerID="cri-o://81a29b7d93f765a228a3781923863fbde95731184036d5ed64331e57ee7c3f27" gracePeriod=30 Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.625144 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-httpd" containerID="cri-o://b52e0e62bb67919c178d9fedc5ea06585e058225cdb9243ad56fa4196b5de4a1" gracePeriod=30 Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.625566 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerStarted","Data":"b52e0e62bb67919c178d9fedc5ea06585e058225cdb9243ad56fa4196b5de4a1"} Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.638301 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lflh7\" (UniqueName: \"kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7\") pod \"dnsmasq-dns-6b9c8b59c-8kw59\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.637254 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" podStartSLOduration=20.637232381 podStartE2EDuration="20.637232381s" podCreationTimestamp="2025-12-15 07:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:05.624488274 +0000 UTC m=+1311.195631185" watchObservedRunningTime="2025-12-15 07:13:05.637232381 +0000 UTC m=+1311.208375312" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.667790 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-8jtjq" podStartSLOduration=11.667774178 
podStartE2EDuration="11.667774178s" podCreationTimestamp="2025-12-15 07:12:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:05.666808175 +0000 UTC m=+1311.237951086" watchObservedRunningTime="2025-12-15 07:13:05.667774178 +0000 UTC m=+1311.238917089" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.716059 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.716141 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.716195 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.716244 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.716309 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57kwg\" (UniqueName: \"kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.719643 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=20.719626769 podStartE2EDuration="20.719626769s" podCreationTimestamp="2025-12-15 07:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:05.708551442 +0000 UTC m=+1311.279694353" watchObservedRunningTime="2025-12-15 07:13:05.719626769 +0000 UTC m=+1311.290769680" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.721465 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.734923 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.736804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.740987 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.742606 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57kwg\" (UniqueName: \"kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.754353 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config\") pod \"neutron-6f6fb4f468-c4zbx\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:05 crc kubenswrapper[4876]: I1215 07:13:05.845369 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.637024 4876 generic.go:334] "Generic (PLEG): container finished" podID="46057e1c-873e-40f1-81c2-77c99f416cc7" containerID="8b931409e9892662e3816fe38ace9a8974bc0af1f3224b7c5f8239fed95708bc" exitCode=0 Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.637158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qnzgp" event={"ID":"46057e1c-873e-40f1-81c2-77c99f416cc7","Type":"ContainerDied","Data":"8b931409e9892662e3816fe38ace9a8974bc0af1f3224b7c5f8239fed95708bc"} Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.641065 4876 generic.go:334] "Generic (PLEG): container finished" podID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerID="b52e0e62bb67919c178d9fedc5ea06585e058225cdb9243ad56fa4196b5de4a1" exitCode=0 Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.641096 4876 generic.go:334] "Generic (PLEG): container finished" podID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerID="81a29b7d93f765a228a3781923863fbde95731184036d5ed64331e57ee7c3f27" exitCode=143 Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.641208 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerDied","Data":"b52e0e62bb67919c178d9fedc5ea06585e058225cdb9243ad56fa4196b5de4a1"} Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.641280 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerDied","Data":"81a29b7d93f765a228a3781923863fbde95731184036d5ed64331e57ee7c3f27"} Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.645938 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="dnsmasq-dns" containerID="cri-o://50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c" gracePeriod=10 Dec 15 07:13:06 crc kubenswrapper[4876]: I1215 07:13:06.646078 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerStarted","Data":"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.258719 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370061 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370146 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370225 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370277 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370321 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370347 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn6kp\" (UniqueName: \"kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.370386 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs\") pod \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\" (UID: \"de8bd253-c7e8-4315-8afc-ad0d2046cd12\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.372161 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.372779 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs" (OuterVolumeSpecName: "logs") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.377526 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp" (OuterVolumeSpecName: "kube-api-access-qn6kp") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "kube-api-access-qn6kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.377608 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.378277 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts" (OuterVolumeSpecName: "scripts") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.399524 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.446157 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.450812 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data" (OuterVolumeSpecName: "config-data") pod "de8bd253-c7e8-4315-8afc-ad0d2046cd12" (UID: "de8bd253-c7e8-4315-8afc-ad0d2046cd12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.463929 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475266 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtm4c\" (UniqueName: \"kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475393 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475468 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475539 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475571 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.475634 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc\") pod \"7ed3ae48-fe00-4892-9117-12b25c129b84\" (UID: \"7ed3ae48-fe00-4892-9117-12b25c129b84\") " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476130 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476145 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476154 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn6kp\" (UniqueName: \"kubernetes.io/projected/de8bd253-c7e8-4315-8afc-ad0d2046cd12-kube-api-access-qn6kp\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476163 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476171 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8bd253-c7e8-4315-8afc-ad0d2046cd12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 
07:13:07.476190 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.476199 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de8bd253-c7e8-4315-8afc-ad0d2046cd12-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.512752 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c" (OuterVolumeSpecName: "kube-api-access-jtm4c") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "kube-api-access-jtm4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.535629 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.556569 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.576702 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtm4c\" (UniqueName: \"kubernetes.io/projected/7ed3ae48-fe00-4892-9117-12b25c129b84-kube-api-access-jtm4c\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.576729 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.599129 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.609408 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.610303 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config" (OuterVolumeSpecName: "config") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.618002 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.618859 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7ed3ae48-fe00-4892-9117-12b25c129b84" (UID: "7ed3ae48-fe00-4892-9117-12b25c129b84"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.664259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerStarted","Data":"471920c2286d867a03feb613896f167c821b6180fe39ce39d329eb6d602b57d7"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.667356 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerStarted","Data":"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.667440 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-log" containerID="cri-o://38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" gracePeriod=30 Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.667477 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-httpd" containerID="cri-o://3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" gracePeriod=30 Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.679237 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.679224 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de8bd253-c7e8-4315-8afc-ad0d2046cd12","Type":"ContainerDied","Data":"20badb3dcc91547103c138f1d65bf313b4a43755f4f9e15fe6e6f50e97214479"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.679384 4876 scope.go:117] "RemoveContainer" containerID="b52e0e62bb67919c178d9fedc5ea06585e058225cdb9243ad56fa4196b5de4a1" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.682629 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" event={"ID":"ad8a1b74-0614-4fb4-8f83-989c5a8b475e","Type":"ContainerStarted","Data":"35e1ca7791d4598ab0b00970656d9a8bef977b4cacebf985b4b7aeb64123576e"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.685276 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.685623 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.685807 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.685854 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.685868 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7ed3ae48-fe00-4892-9117-12b25c129b84-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.706318 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=22.706299125 podStartE2EDuration="22.706299125s" podCreationTimestamp="2025-12-15 07:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:07.694854519 +0000 UTC m=+1313.265997440" watchObservedRunningTime="2025-12-15 07:13:07.706299125 +0000 UTC m=+1313.277442046" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.706523 4876 generic.go:334] "Generic (PLEG): container finished" podID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerID="50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c" exitCode=0 Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.706626 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.706731 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" event={"ID":"7ed3ae48-fe00-4892-9117-12b25c129b84","Type":"ContainerDied","Data":"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.706765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dc4fcdbc-fclsf" event={"ID":"7ed3ae48-fe00-4892-9117-12b25c129b84","Type":"ContainerDied","Data":"e0522473553f44fcd2f0f99ae7401e86efec1c29ba6e47a29ca051246f7c32d1"} Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.712916 4876 scope.go:117] "RemoveContainer" containerID="81a29b7d93f765a228a3781923863fbde95731184036d5ed64331e57ee7c3f27" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757005 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:13:07 crc kubenswrapper[4876]: E1215 07:13:07.757614 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="dnsmasq-dns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757636 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="dnsmasq-dns" Dec 15 07:13:07 crc kubenswrapper[4876]: E1215 07:13:07.757645 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-httpd" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757651 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-httpd" Dec 15 07:13:07 crc kubenswrapper[4876]: E1215 07:13:07.757691 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="init" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757699 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="init" Dec 15 07:13:07 crc kubenswrapper[4876]: E1215 07:13:07.757728 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-log" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757734 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-log" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.757975 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-httpd" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.758068 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" containerName="dnsmasq-dns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.758078 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" containerName="glance-log" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.759753 4876 scope.go:117] "RemoveContainer" containerID="50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.759904 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.761962 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.762303 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.788963 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.889200 4876 scope.go:117] "RemoveContainer" containerID="8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898048 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjqqj\" (UniqueName: \"kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898180 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898211 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898328 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898355 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898381 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.898405 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:07 crc kubenswrapper[4876]: 
I1215 07:13:07.899992 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.937038 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.964601 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:13:07 crc kubenswrapper[4876]: I1215 07:13:07.987472 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dc4fcdbc-fclsf"] Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.000275 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.000679 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.001369 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.001531 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.001678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjqqj\" (UniqueName: \"kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.002871 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.003043 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.005724 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config\") pod \"neutron-57dd59bc-td2ns\" (UID: 
\"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.008175 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.008621 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.011764 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.012128 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.018938 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.030605 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.032127 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.038481 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.038940 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.043294 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.047992 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjqqj\" (UniqueName: \"kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj\") pod \"neutron-57dd59bc-td2ns\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.189821 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207129 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207224 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207310 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxzlz\" (UniqueName: \"kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207331 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207355 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207411 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207480 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.207503 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.299237 4876 scope.go:117] "RemoveContainer" containerID="50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c" Dec 15 07:13:08 crc kubenswrapper[4876]: E1215 07:13:08.300119 4876 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c\": container with ID starting with 50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c not found: ID does not exist" containerID="50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.300190 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c"} err="failed to get container status \"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c\": rpc error: code = NotFound desc = could not find container \"50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c\": container with ID starting with 50884a0ecea0c9af4810d22d64c831ebc05474b9be5cffc0a158aab10ecf802c not found: ID does not exist" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.300212 4876 scope.go:117] "RemoveContainer" containerID="8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d" Dec 15 07:13:08 crc kubenswrapper[4876]: E1215 07:13:08.301862 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d\": container with ID starting with 8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d not found: ID does not exist" containerID="8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.301905 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d"} err="failed to get container status \"8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d\": rpc error: code = NotFound desc = could not find container \"8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d\": container with ID starting with 8921c87e658e1fd7c931ead20eece48c92b3228cc4be23dbfa51c9eb39be2d7d not found: ID does not exist" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309154 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309199 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309264 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxzlz\" (UniqueName: \"kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309411 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.309465 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.310655 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.310820 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.318947 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.324677 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.334469 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.349831 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.352137 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.353002 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qnzgp" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.356432 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxzlz\" (UniqueName: \"kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.366677 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.411662 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts\") pod \"46057e1c-873e-40f1-81c2-77c99f416cc7\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.411776 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6krf\" (UniqueName: \"kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf\") pod \"46057e1c-873e-40f1-81c2-77c99f416cc7\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.411837 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle\") pod \"46057e1c-873e-40f1-81c2-77c99f416cc7\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.411946 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs\") pod \"46057e1c-873e-40f1-81c2-77c99f416cc7\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.411972 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data\") pod \"46057e1c-873e-40f1-81c2-77c99f416cc7\" (UID: \"46057e1c-873e-40f1-81c2-77c99f416cc7\") " Dec 15 07:13:08 
crc kubenswrapper[4876]: I1215 07:13:08.412417 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs" (OuterVolumeSpecName: "logs") pod "46057e1c-873e-40f1-81c2-77c99f416cc7" (UID: "46057e1c-873e-40f1-81c2-77c99f416cc7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.416217 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts" (OuterVolumeSpecName: "scripts") pod "46057e1c-873e-40f1-81c2-77c99f416cc7" (UID: "46057e1c-873e-40f1-81c2-77c99f416cc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.429030 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf" (OuterVolumeSpecName: "kube-api-access-t6krf") pod "46057e1c-873e-40f1-81c2-77c99f416cc7" (UID: "46057e1c-873e-40f1-81c2-77c99f416cc7"). InnerVolumeSpecName "kube-api-access-t6krf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.441966 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data" (OuterVolumeSpecName: "config-data") pod "46057e1c-873e-40f1-81c2-77c99f416cc7" (UID: "46057e1c-873e-40f1-81c2-77c99f416cc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.444546 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46057e1c-873e-40f1-81c2-77c99f416cc7" (UID: "46057e1c-873e-40f1-81c2-77c99f416cc7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.514267 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.514297 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6krf\" (UniqueName: \"kubernetes.io/projected/46057e1c-873e-40f1-81c2-77c99f416cc7-kube-api-access-t6krf\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.514310 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.514318 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46057e1c-873e-40f1-81c2-77c99f416cc7-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.514326 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/46057e1c-873e-40f1-81c2-77c99f416cc7-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.602482 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.631612 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726636 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwvp9\" (UniqueName: \"kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726675 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726703 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726838 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726905 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726963 4876 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.726985 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs\") pod \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\" (UID: \"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6\") " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.727650 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs" (OuterVolumeSpecName: "logs") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.727923 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.739416 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts" (OuterVolumeSpecName: "scripts") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.741899 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-qnzgp" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.744244 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ed3ae48-fe00-4892-9117-12b25c129b84" path="/var/lib/kubelet/pods/7ed3ae48-fe00-4892-9117-12b25c129b84/volumes" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.745150 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de8bd253-c7e8-4315-8afc-ad0d2046cd12" path="/var/lib/kubelet/pods/de8bd253-c7e8-4315-8afc-ad0d2046cd12/volumes" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.750064 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.750579 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9" (OuterVolumeSpecName: "kube-api-access-qwvp9") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "kube-api-access-qwvp9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.757844 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-qnzgp" event={"ID":"46057e1c-873e-40f1-81c2-77c99f416cc7","Type":"ContainerDied","Data":"5ccaadbdec1b0fdc2f650718dc952ef818ca92f95d1bf3d236f04c1403320b8a"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.757885 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ccaadbdec1b0fdc2f650718dc952ef818ca92f95d1bf3d236f04c1403320b8a" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.782218 4876 generic.go:334] "Generic (PLEG): container finished" podID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerID="65c96db663a1d570b833d012a7e747aecfa41909ee101b29781a46429cfe9dca" exitCode=0 Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.782306 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" event={"ID":"ad8a1b74-0614-4fb4-8f83-989c5a8b475e","Type":"ContainerDied","Data":"65c96db663a1d570b833d012a7e747aecfa41909ee101b29781a46429cfe9dca"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.795476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerStarted","Data":"5feb63996ef0003d1a572bf59e6097bdf6695e712ecbb2ad4f4a2e78e9088c8a"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.795523 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerStarted","Data":"ed610484fe9156f9f3346af05c9736e7afab98e667a77ccff218365c06cb741a"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.795744 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.817181 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:13:08 crc kubenswrapper[4876]: E1215 07:13:08.817920 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-log" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818004 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-log" Dec 15 07:13:08 crc kubenswrapper[4876]: E1215 07:13:08.818072 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46057e1c-873e-40f1-81c2-77c99f416cc7" containerName="placement-db-sync" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818262 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="46057e1c-873e-40f1-81c2-77c99f416cc7" containerName="placement-db-sync" Dec 15 07:13:08 crc kubenswrapper[4876]: E1215 07:13:08.818337 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-httpd" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818386 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-httpd" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818653 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-httpd" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818765 4876 
memory_manager.go:354] "RemoveStaleState removing state" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerName="glance-log" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.818817 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="46057e1c-873e-40f1-81c2-77c99f416cc7" containerName="placement-db-sync" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.819727 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.826446 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.826816 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.827083 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.827275 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-r4vkj" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.828922 4876 generic.go:334] "Generic (PLEG): container finished" podID="fb7788fd-6d1c-4251-9629-2f550c34b522" containerID="a0a8d5ff29d8c7fb66681edfebb386b1c81d12a16bcc7db3d27a1bac7237e1fd" exitCode=0 Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.829013 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8jtjq" event={"ID":"fb7788fd-6d1c-4251-9629-2f550c34b522","Type":"ContainerDied","Data":"a0a8d5ff29d8c7fb66681edfebb386b1c81d12a16bcc7db3d27a1bac7237e1fd"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.829876 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.832450 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.832554 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwvp9\" (UniqueName: \"kubernetes.io/projected/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-kube-api-access-qwvp9\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.832610 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.832664 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.832736 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.833279 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data" (OuterVolumeSpecName: "config-data") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: 
"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.851458 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" (UID: "fe6bf2c3-e853-4b33-9a4e-f96c504aaba6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.851674 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.852994 4876 generic.go:334] "Generic (PLEG): container finished" podID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerID="3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" exitCode=0 Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853185 4876 generic.go:334] "Generic (PLEG): container finished" podID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" containerID="38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" exitCode=143 Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853263 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerDied","Data":"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853339 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerDied","Data":"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853415 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fe6bf2c3-e853-4b33-9a4e-f96c504aaba6","Type":"ContainerDied","Data":"5a024ee269cb070b5b4726257c006dd4f8b0845af671b04d95beebe8b522f5f0"} Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853495 4876 scope.go:117] "RemoveContainer" containerID="3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.853670 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.904330 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.947510 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.947784 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.949341 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.950925 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.951049 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz82f\" (UniqueName: \"kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.951236 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.951440 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.953340 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.953455 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.953530 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:08 crc kubenswrapper[4876]: I1215 07:13:08.975560 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6f6fb4f468-c4zbx" podStartSLOduration=3.975531936 podStartE2EDuration="3.975531936s" podCreationTimestamp="2025-12-15 07:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:08.942825968 +0000 UTC m=+1314.513968889" watchObservedRunningTime="2025-12-15 07:13:08.975531936 +0000 UTC m=+1314.546674847" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.036359 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.055613 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.055656 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.055676 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz82f\" (UniqueName: \"kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.055699 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.055732 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.056044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.056095 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.059778 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.062845 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.065371 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.066865 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.068869 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.069299 4876 scope.go:117] "RemoveContainer" containerID="38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.070300 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.084179 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.093271 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz82f\" (UniqueName: \"kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f\") pod \"placement-64c85ddd54-vd84c\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.102713 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.127788 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.142965 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.144698 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.148419 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.148825 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.149136 4876 scope.go:117] "RemoveContainer" containerID="3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" Dec 15 07:13:09 crc kubenswrapper[4876]: E1215 07:13:09.160403 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4\": container with ID starting with 3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4 not found: ID does not exist" containerID="3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.160659 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4"} err="failed to get container status \"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4\": rpc error: code = NotFound desc = could not find container \"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4\": container with ID starting with 3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4 not found: ID does not exist" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.164484 4876 scope.go:117] "RemoveContainer" containerID="38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.166418 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:09 crc kubenswrapper[4876]: E1215 07:13:09.169339 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655\": container with ID starting with 38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655 not found: ID does not exist" containerID="38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.169389 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655"} err="failed to get container status \"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655\": rpc error: code = NotFound desc = could not find container \"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655\": container with ID starting with 38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655 not 
found: ID does not exist" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.169420 4876 scope.go:117] "RemoveContainer" containerID="3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.173406 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4"} err="failed to get container status \"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4\": rpc error: code = NotFound desc = could not find container \"3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4\": container with ID starting with 3f072098eec9249dabab1383b732ff219f7d36cbd2059c88556d53ed8d70c7a4 not found: ID does not exist" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.173468 4876 scope.go:117] "RemoveContainer" containerID="38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.174820 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655"} err="failed to get container status \"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655\": rpc error: code = NotFound desc = could not find container \"38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655\": container with ID starting with 38cce662720868f07a70a634607a8450af724241bb7622039c93ce24552a8655 not found: ID does not exist" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.263032 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.263944 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndncb\" (UniqueName: \"kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.264088 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.264204 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.264355 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.265349 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.265477 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.265569 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.299996 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:13:09 crc kubenswrapper[4876]: W1215 07:13:09.324920 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod764e8736_0f6f_49ee_9ca8_d6dd98ddf969.slice/crio-ee66c89683db4aa505c096aedb1f92d696e0b3188ad23ae3fd23059483e7bf95 WatchSource:0}: Error finding container ee66c89683db4aa505c096aedb1f92d696e0b3188ad23ae3fd23059483e7bf95: Status 404 returned error can't find the container with id ee66c89683db4aa505c096aedb1f92d696e0b3188ad23ae3fd23059483e7bf95 Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368266 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368375 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368467 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368522 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368556 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndncb\" (UniqueName: \"kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368582 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368598 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.368977 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.369353 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.369599 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.377370 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.377388 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.381142 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.383877 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.397225 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndncb\" (UniqueName: \"kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.417871 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.470403 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.872346 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.895181 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerStarted","Data":"ee66c89683db4aa505c096aedb1f92d696e0b3188ad23ae3fd23059483e7bf95"} Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.921624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerStarted","Data":"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6"} Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.921692 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerStarted","Data":"5cdd3742f3dafd4d1b2686f6ba04e093fd1505fccab185b64c0342c8f4cf64aa"} Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.923019 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.958325 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-57dd59bc-td2ns" podStartSLOduration=2.958304209 podStartE2EDuration="2.958304209s" podCreationTimestamp="2025-12-15 07:13:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:09.944040486 +0000 UTC m=+1315.515183397" watchObservedRunningTime="2025-12-15 07:13:09.958304209 +0000 UTC m=+1315.529447120" Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.966244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" 
event={"ID":"ad8a1b74-0614-4fb4-8f83-989c5a8b475e","Type":"ContainerStarted","Data":"2fd2db1b9b1b6995a463ea1817502dd08769e677c2e02fb0e8f2fcd8d76c1cc3"} Dec 15 07:13:09 crc kubenswrapper[4876]: I1215 07:13:09.966357 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:10 crc kubenswrapper[4876]: I1215 07:13:10.002619 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" podStartSLOduration=5.002594438 podStartE2EDuration="5.002594438s" podCreationTimestamp="2025-12-15 07:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:09.995822285 +0000 UTC m=+1315.566965216" watchObservedRunningTime="2025-12-15 07:13:10.002594438 +0000 UTC m=+1315.573737349" Dec 15 07:13:10 crc kubenswrapper[4876]: I1215 07:13:10.750059 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe6bf2c3-e853-4b33-9a4e-f96c504aaba6" path="/var/lib/kubelet/pods/fe6bf2c3-e853-4b33-9a4e-f96c504aaba6/volumes" Dec 15 07:13:10 crc kubenswrapper[4876]: I1215 07:13:10.760982 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:13:10 crc kubenswrapper[4876]: W1215 07:13:10.767544 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba07dd8d_d3e0_4981_a0d4_1339fdb451c0.slice/crio-6e44b3a71e7d63692d6279cee0b8cc2ce7fad37b09a07d1d7f9930ab1f5fc512 WatchSource:0}: Error finding container 6e44b3a71e7d63692d6279cee0b8cc2ce7fad37b09a07d1d7f9930ab1f5fc512: Status 404 returned error can't find the container with id 6e44b3a71e7d63692d6279cee0b8cc2ce7fad37b09a07d1d7f9930ab1f5fc512 Dec 15 07:13:10 crc kubenswrapper[4876]: I1215 07:13:10.921917 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.033378 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerStarted","Data":"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6"} Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.053547 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.053744 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr6g7\" (UniqueName: \"kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.053815 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.053909 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.053998 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.054088 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts\") pod \"fb7788fd-6d1c-4251-9629-2f550c34b522\" (UID: \"fb7788fd-6d1c-4251-9629-2f550c34b522\") " Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.090363 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.090547 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts" (OuterVolumeSpecName: "scripts") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.091064 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7" (OuterVolumeSpecName: "kube-api-access-sr6g7") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "kube-api-access-sr6g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.112425 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerStarted","Data":"6e44b3a71e7d63692d6279cee0b8cc2ce7fad37b09a07d1d7f9930ab1f5fc512"} Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.112494 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.220366 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.220407 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr6g7\" (UniqueName: \"kubernetes.io/projected/fb7788fd-6d1c-4251-9629-2f550c34b522-kube-api-access-sr6g7\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.220418 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.220426 4876 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.222591 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerStarted","Data":"b7cee017897c12539c9299fd75ae6631f0e2df1da620d7ed092d9f4e6c534e80"} Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.226880 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-8jtjq" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.227018 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-8jtjq" event={"ID":"fb7788fd-6d1c-4251-9629-2f550c34b522","Type":"ContainerDied","Data":"8045507e8bd0fe48cbd27a3f3b717fe7afc1bc3d2d0105c99ca01070497fecf6"} Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.227033 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8045507e8bd0fe48cbd27a3f3b717fe7afc1bc3d2d0105c99ca01070497fecf6" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.358283 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data" (OuterVolumeSpecName: "config-data") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.390455 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb7788fd-6d1c-4251-9629-2f550c34b522" (UID: "fb7788fd-6d1c-4251-9629-2f550c34b522"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.423671 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:11 crc kubenswrapper[4876]: I1215 07:13:11.423711 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb7788fd-6d1c-4251-9629-2f550c34b522-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.123375 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:13:12 crc kubenswrapper[4876]: E1215 07:13:12.124551 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb7788fd-6d1c-4251-9629-2f550c34b522" containerName="keystone-bootstrap" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.124569 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb7788fd-6d1c-4251-9629-2f550c34b522" containerName="keystone-bootstrap" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.124821 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb7788fd-6d1c-4251-9629-2f550c34b522" containerName="keystone-bootstrap" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.128194 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.143169 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.146009 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.146827 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.147286 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.147448 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.147582 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-v76mv" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.264981 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.278059 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerStarted","Data":"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f"} Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.280504 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerStarted","Data":"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b"} Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.290641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerStarted","Data":"cef34266913f1af42f423b9d794de57144b0ceaffd39d3e744bc8fb3a3cb3355"} Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.291042 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerStarted","Data":"08cefbd86f57e35feb0652dfa8a1d0ef70620766258370c430d6aa69ef015c8f"} Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.292246 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408241 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408308 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408341 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408408 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408490 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408536 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmz8z\" (UniqueName: \"kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408553 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.408588 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.433649 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-64c85ddd54-vd84c" podStartSLOduration=4.433628461 podStartE2EDuration="4.433628461s" podCreationTimestamp="2025-12-15 07:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:12.401314272 +0000 UTC m=+1317.972457193" watchObservedRunningTime="2025-12-15 07:13:12.433628461 +0000 UTC m=+1318.004771382" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514421 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: 
\"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514506 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514562 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514618 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514646 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmz8z\" (UniqueName: \"kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514662 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.514690 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.519735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.520879 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.522306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: 
I1215 07:13:12.524804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.527458 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.528235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.531557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.545759 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmz8z\" (UniqueName: \"kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z\") pod \"keystone-8689b9f5b5-zrv9l\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:12 crc kubenswrapper[4876]: I1215 07:13:12.979354 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:13 crc kubenswrapper[4876]: I1215 07:13:13.317161 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerStarted","Data":"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4"} Dec 15 07:13:13 crc kubenswrapper[4876]: I1215 07:13:13.340950 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerStarted","Data":"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a"} Dec 15 07:13:13 crc kubenswrapper[4876]: I1215 07:13:13.341141 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:13 crc kubenswrapper[4876]: I1215 07:13:13.361733 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.361704965 podStartE2EDuration="6.361704965s" podCreationTimestamp="2025-12-15 07:13:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:13.348208159 +0000 UTC m=+1318.919351090" watchObservedRunningTime="2025-12-15 07:13:13.361704965 +0000 UTC m=+1318.932847876" Dec 15 07:13:13 crc kubenswrapper[4876]: I1215 07:13:13.385991 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.38597348 podStartE2EDuration="4.38597348s" podCreationTimestamp="2025-12-15 07:13:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:13.383006498 +0000 UTC m=+1318.954149549" watchObservedRunningTime="2025-12-15 07:13:13.38597348 +0000 UTC m=+1318.957116391" Dec 15 07:13:15 crc kubenswrapper[4876]: I1215 07:13:15.723330 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:15 crc kubenswrapper[4876]: I1215 07:13:15.790604 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:13:15 crc kubenswrapper[4876]: I1215 07:13:15.790928 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="dnsmasq-dns" containerID="cri-o://c807ad69fbc79e09e32d13f8b7b357644c1f9fa31582855fed18f01492071360" gracePeriod=10 Dec 15 07:13:16 crc kubenswrapper[4876]: I1215 07:13:16.375587 4876 generic.go:334] "Generic (PLEG): container finished" podID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerID="c807ad69fbc79e09e32d13f8b7b357644c1f9fa31582855fed18f01492071360" exitCode=0 Dec 15 07:13:16 crc kubenswrapper[4876]: I1215 07:13:16.375655 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" event={"ID":"6cbe0e15-c52e-43d4-986c-ace016ab598e","Type":"ContainerDied","Data":"c807ad69fbc79e09e32d13f8b7b357644c1f9fa31582855fed18f01492071360"} Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.186954 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.311941 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.312230 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.312284 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.312390 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.312526 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbc49\" (UniqueName: \"kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.317139 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49" (OuterVolumeSpecName: "kube-api-access-fbc49") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e"). InnerVolumeSpecName "kube-api-access-fbc49". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.387678 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" event={"ID":"6cbe0e15-c52e-43d4-986c-ace016ab598e","Type":"ContainerDied","Data":"4c8fa49e413fba19144c3582c46453dbc4e1e5afa5aec6106e8b8844fcde11df"} Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.387728 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb545bd4c-2fnbt" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.388372 4876 scope.go:117] "RemoveContainer" containerID="c807ad69fbc79e09e32d13f8b7b357644c1f9fa31582855fed18f01492071360" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.414720 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbc49\" (UniqueName: \"kubernetes.io/projected/6cbe0e15-c52e-43d4-986c-ace016ab598e-kube-api-access-fbc49\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.819974 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.822218 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:17 crc kubenswrapper[4876]: E1215 07:13:17.823450 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config podName:6cbe0e15-c52e-43d4-986c-ace016ab598e nodeName:}" failed. No retries permitted until 2025-12-15 07:13:18.323429456 +0000 UTC m=+1323.894572367 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e") : error deleting /var/lib/kubelet/pods/6cbe0e15-c52e-43d4-986c-ace016ab598e/volume-subpaths: remove /var/lib/kubelet/pods/6cbe0e15-c52e-43d4-986c-ace016ab598e/volume-subpaths: no such file or directory Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.823694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.824077 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.866198 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:13:17 crc kubenswrapper[4876]: W1215 07:13:17.867690 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd724d787_7189_4c81_94bf_08c2904deaf9.slice/crio-3d42f3f033fbb11378e4eef1568eca239a8d49b3bbcb47d9876068405c492a6c WatchSource:0}: Error finding container 3d42f3f033fbb11378e4eef1568eca239a8d49b3bbcb47d9876068405c492a6c: Status 404 returned error can't find the container with id 3d42f3f033fbb11378e4eef1568eca239a8d49b3bbcb47d9876068405c492a6c Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.925287 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:17 crc kubenswrapper[4876]: I1215 07:13:17.925315 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.000451 4876 scope.go:117] "RemoveContainer" containerID="6ca18e18f554fb532d60b436b864f1ba0e4abb0dabb3d447eca9979d0d5e76df" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.333892 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") pod \"6cbe0e15-c52e-43d4-986c-ace016ab598e\" (UID: \"6cbe0e15-c52e-43d4-986c-ace016ab598e\") " Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.335539 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config" (OuterVolumeSpecName: "config") pod "6cbe0e15-c52e-43d4-986c-ace016ab598e" (UID: "6cbe0e15-c52e-43d4-986c-ace016ab598e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.400387 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8689b9f5b5-zrv9l" event={"ID":"d724d787-7189-4c81-94bf-08c2904deaf9","Type":"ContainerStarted","Data":"1332d0e465fbd1defca4d9cc7fb36daa61b5bbbf7ddc14584dbbe92e062e9452"} Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.400441 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8689b9f5b5-zrv9l" event={"ID":"d724d787-7189-4c81-94bf-08c2904deaf9","Type":"ContainerStarted","Data":"3d42f3f033fbb11378e4eef1568eca239a8d49b3bbcb47d9876068405c492a6c"} Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.400740 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.403255 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerStarted","Data":"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573"} Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.424080 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-8689b9f5b5-zrv9l" podStartSLOduration=6.424058392 podStartE2EDuration="6.424058392s" podCreationTimestamp="2025-12-15 07:13:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:18.417940064 +0000 UTC m=+1323.989082985" watchObservedRunningTime="2025-12-15 07:13:18.424058392 +0000 UTC m=+1323.995201303" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.439098 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cbe0e15-c52e-43d4-986c-ace016ab598e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.603372 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.603750 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.634636 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.635230 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.648813 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cb545bd4c-2fnbt"] Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.673243 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 07:13:18 crc kubenswrapper[4876]: I1215 07:13:18.729433 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" path="/var/lib/kubelet/pods/6cbe0e15-c52e-43d4-986c-ace016ab598e/volumes" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.415376 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s6h7g" 
event={"ID":"88bc8b14-b04f-4062-b400-f8387184810e","Type":"ContainerStarted","Data":"a9f4a3c24d47c88f332ff32aad0cf2a4aada60f557ecd665944aa01164bc7368"} Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.416082 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.416127 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.446464 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-s6h7g" podStartSLOduration=2.803547189 podStartE2EDuration="38.446444201s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="2025-12-15 07:12:43.173242247 +0000 UTC m=+1288.744385168" lastFinishedPulling="2025-12-15 07:13:18.816139269 +0000 UTC m=+1324.387282180" observedRunningTime="2025-12-15 07:13:19.43521592 +0000 UTC m=+1325.006358841" watchObservedRunningTime="2025-12-15 07:13:19.446444201 +0000 UTC m=+1325.017587112" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.471516 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.471585 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.525857 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:19 crc kubenswrapper[4876]: I1215 07:13:19.537953 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:20 crc kubenswrapper[4876]: I1215 07:13:20.434848 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:20 crc kubenswrapper[4876]: I1215 07:13:20.435872 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:21 crc kubenswrapper[4876]: I1215 07:13:21.451574 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mx7zg" event={"ID":"aaaef001-e178-4e64-80b1-e86fbd15ba8e","Type":"ContainerStarted","Data":"6649c600f8a029cdee4c0de05db35b7d2ab1a9c6655e3285739700ef5c19c813"} Dec 15 07:13:21 crc kubenswrapper[4876]: I1215 07:13:21.495763 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-mx7zg" podStartSLOduration=3.037676963 podStartE2EDuration="40.495743106s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="2025-12-15 07:12:42.926231168 +0000 UTC m=+1288.497374079" lastFinishedPulling="2025-12-15 07:13:20.384297311 +0000 UTC m=+1325.955440222" observedRunningTime="2025-12-15 07:13:21.489040465 +0000 UTC m=+1327.060183396" watchObservedRunningTime="2025-12-15 07:13:21.495743106 +0000 UTC m=+1327.066886027" Dec 15 07:13:21 crc kubenswrapper[4876]: I1215 07:13:21.678709 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 07:13:21 crc kubenswrapper[4876]: I1215 07:13:21.679065 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:13:21 crc kubenswrapper[4876]: I1215 07:13:21.828906 4876 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.464680 4876 generic.go:334] "Generic (PLEG): container finished" podID="88bc8b14-b04f-4062-b400-f8387184810e" containerID="a9f4a3c24d47c88f332ff32aad0cf2a4aada60f557ecd665944aa01164bc7368" exitCode=0 Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.465541 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s6h7g" event={"ID":"88bc8b14-b04f-4062-b400-f8387184810e","Type":"ContainerDied","Data":"a9f4a3c24d47c88f332ff32aad0cf2a4aada60f557ecd665944aa01164bc7368"} Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.465872 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.465897 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.639623 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:22 crc kubenswrapper[4876]: I1215 07:13:22.703380 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 07:13:26 crc kubenswrapper[4876]: I1215 07:13:26.504466 4876 generic.go:334] "Generic (PLEG): container finished" podID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" containerID="6649c600f8a029cdee4c0de05db35b7d2ab1a9c6655e3285739700ef5c19c813" exitCode=0 Dec 15 07:13:26 crc kubenswrapper[4876]: I1215 07:13:26.504676 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mx7zg" event={"ID":"aaaef001-e178-4e64-80b1-e86fbd15ba8e","Type":"ContainerDied","Data":"6649c600f8a029cdee4c0de05db35b7d2ab1a9c6655e3285739700ef5c19c813"} Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.129155 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.231950 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle\") pod \"88bc8b14-b04f-4062-b400-f8387184810e\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.233536 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvstj\" (UniqueName: \"kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj\") pod \"88bc8b14-b04f-4062-b400-f8387184810e\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.233662 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data\") pod \"88bc8b14-b04f-4062-b400-f8387184810e\" (UID: \"88bc8b14-b04f-4062-b400-f8387184810e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.240676 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "88bc8b14-b04f-4062-b400-f8387184810e" (UID: "88bc8b14-b04f-4062-b400-f8387184810e"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.240765 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj" (OuterVolumeSpecName: "kube-api-access-zvstj") pod "88bc8b14-b04f-4062-b400-f8387184810e" (UID: "88bc8b14-b04f-4062-b400-f8387184810e"). InnerVolumeSpecName "kube-api-access-zvstj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.256879 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88bc8b14-b04f-4062-b400-f8387184810e" (UID: "88bc8b14-b04f-4062-b400-f8387184810e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.322322 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.322440 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.322492 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.323222 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.323290 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84" gracePeriod=600 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.336159 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvstj\" (UniqueName: \"kubernetes.io/projected/88bc8b14-b04f-4062-b400-f8387184810e-kube-api-access-zvstj\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.336201 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.336212 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/88bc8b14-b04f-4062-b400-f8387184810e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.516370 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84" exitCode=0 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.516588 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84"} Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.516819 4876 scope.go:117] "RemoveContainer" containerID="4baa74438474f842b95837cb376641c83032001b2fc70604008ed449440b26a2" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.520354 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerStarted","Data":"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f"} Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.520514 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-central-agent" containerID="cri-o://0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265" gracePeriod=30 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.520800 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.521181 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="proxy-httpd" containerID="cri-o://cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f" gracePeriod=30 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.521252 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="sg-core" containerID="cri-o://dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573" gracePeriod=30 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.521301 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-notification-agent" containerID="cri-o://86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa" gracePeriod=30 Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.524641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-s6h7g" event={"ID":"88bc8b14-b04f-4062-b400-f8387184810e","Type":"ContainerDied","Data":"9a778577a755c720b733e8a5070f1039c0f4bb2f2cb2ddc9260db1e57f73d3b2"} Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.524689 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a778577a755c720b733e8a5070f1039c0f4bb2f2cb2ddc9260db1e57f73d3b2" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.524664 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-s6h7g" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.547899 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.944902677 podStartE2EDuration="46.547874266s" podCreationTimestamp="2025-12-15 07:12:41 +0000 UTC" firstStartedPulling="2025-12-15 07:12:42.519742362 +0000 UTC m=+1288.090885273" lastFinishedPulling="2025-12-15 07:13:27.122713951 +0000 UTC m=+1332.693856862" observedRunningTime="2025-12-15 07:13:27.542212109 +0000 UTC m=+1333.113355050" watchObservedRunningTime="2025-12-15 07:13:27.547874266 +0000 UTC m=+1333.119017177" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.807481 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956075 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956468 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956508 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956171 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956538 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956616 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcz5s\" (UniqueName: \"kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956639 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle\") pod \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\" (UID: \"aaaef001-e178-4e64-80b1-e86fbd15ba8e\") " Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.956982 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aaaef001-e178-4e64-80b1-e86fbd15ba8e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.962257 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.962694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s" (OuterVolumeSpecName: "kube-api-access-pcz5s") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "kube-api-access-pcz5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.962774 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts" (OuterVolumeSpecName: "scripts") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:27 crc kubenswrapper[4876]: I1215 07:13:27.982296 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.004072 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data" (OuterVolumeSpecName: "config-data") pod "aaaef001-e178-4e64-80b1-e86fbd15ba8e" (UID: "aaaef001-e178-4e64-80b1-e86fbd15ba8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.058547 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcz5s\" (UniqueName: \"kubernetes.io/projected/aaaef001-e178-4e64-80b1-e86fbd15ba8e-kube-api-access-pcz5s\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.058591 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.058606 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.058619 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.058633 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaef001-e178-4e64-80b1-e86fbd15ba8e-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.532797 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mx7zg" event={"ID":"aaaef001-e178-4e64-80b1-e86fbd15ba8e","Type":"ContainerDied","Data":"a4000ed45f8582d997ccab1951c431791ad0c56a1ceca0393ee5366a47d4da5f"} Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.533182 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4000ed45f8582d997ccab1951c431791ad0c56a1ceca0393ee5366a47d4da5f" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.532838 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-mx7zg" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.534723 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611"} Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.536974 4876 generic.go:334] "Generic (PLEG): container finished" podID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerID="cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f" exitCode=0 Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.536999 4876 generic.go:334] "Generic (PLEG): container finished" podID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerID="dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573" exitCode=2 Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537007 4876 generic.go:334] "Generic (PLEG): container finished" podID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerID="0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265" exitCode=0 Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerDied","Data":"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f"} Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537049 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerDied","Data":"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573"} Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537058 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerDied","Data":"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265"} Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537648 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:13:28 crc kubenswrapper[4876]: E1215 07:13:28.537961 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" containerName="cinder-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537976 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" containerName="cinder-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: E1215 07:13:28.537987 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88bc8b14-b04f-4062-b400-f8387184810e" containerName="barbican-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.537994 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="88bc8b14-b04f-4062-b400-f8387184810e" containerName="barbican-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: E1215 07:13:28.538006 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="dnsmasq-dns" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.538012 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="dnsmasq-dns" Dec 15 07:13:28 crc kubenswrapper[4876]: E1215 07:13:28.538021 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="init" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.538026 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="init" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.538184 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" containerName="cinder-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.538209 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cbe0e15-c52e-43d4-986c-ace016ab598e" containerName="dnsmasq-dns" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.538232 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="88bc8b14-b04f-4062-b400-f8387184810e" containerName="barbican-db-sync" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.539015 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: W1215 07:13:28.541855 4876 reflector.go:561] object-"openstack"/"barbican-worker-config-data": failed to list *v1.Secret: secrets "barbican-worker-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 15 07:13:28 crc kubenswrapper[4876]: E1215 07:13:28.541902 4876 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"barbican-worker-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"barbican-worker-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.547326 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-hhgc7" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.547898 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.565078 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.567898 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.579381 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.624634 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.661168 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693476 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693573 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693594 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693617 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2fg6\" (UniqueName: \"kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693641 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qdrv\" (UniqueName: \"kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693665 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " 
pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693685 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693753 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.693778 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.723081 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.724523 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.745763 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795018 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2fg6\" (UniqueName: \"kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795077 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qdrv\" (UniqueName: \"kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795124 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795145 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 
07:13:28.795205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795224 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795266 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795293 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795308 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795324 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.795698 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.796668 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.806315 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " 
pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.807627 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.810623 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.810643 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.815201 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.838731 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qdrv\" (UniqueName: \"kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv\") pod \"barbican-keystone-listener-56dc944b8b-zz758\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.864042 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.865345 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.869845 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.872754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2fg6\" (UniqueName: \"kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897086 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897242 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897276 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897335 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897575 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897582 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.897663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87qsm\" (UniqueName: \"kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:28 crc kubenswrapper[4876]: I1215 07:13:28.923741 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.003986 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87qsm\" (UniqueName: \"kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.004368 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.004392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.004415 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.004431 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.004881 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005032 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005232 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brslh\" (UniqueName: \"kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005271 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs\") pod 
\"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005325 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005380 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.005437 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.006156 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.006375 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.006471 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.006773 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.036019 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87qsm\" (UniqueName: \"kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm\") pod \"dnsmasq-dns-7bdf86f46f-p6jl4\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.047860 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.072693 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.074031 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.078949 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.079038 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-c2vjp" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.079142 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.079795 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.106750 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brslh\" (UniqueName: \"kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.106790 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.106835 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.106901 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.106927 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.114521 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.124912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.128871 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.129320 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.130325 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.167090 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.191061 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brslh\" (UniqueName: \"kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh\") pod \"barbican-api-7c7556c956-5vfzz\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208173 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208245 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208290 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208344 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: 
\"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.208391 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk9b6\" (UniqueName: \"kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.217692 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.219053 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.252258 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.277865 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.315758 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.315868 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.315931 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.315956 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk9b6\" (UniqueName: \"kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.316096 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.316697 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.316785 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.319880 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.322057 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.323464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.323523 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smdmn\" (UniqueName: \"kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.323555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.323580 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.323670 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.335632 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.340696 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.340851 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.343119 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk9b6\" (UniqueName: \"kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6\") pod \"cinder-scheduler-0\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.345571 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.357402 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.360813 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.375088 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.402362 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426607 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426705 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smdmn\" (UniqueName: \"kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426728 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426763 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426802 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" 
(UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.426965 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.427604 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.428163 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.428243 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.428675 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.428972 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.445272 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smdmn\" (UniqueName: \"kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn\") pod \"dnsmasq-dns-75bfc9b94f-sh4l9\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.528553 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdnsd\" (UniqueName: \"kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.528856 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc 
kubenswrapper[4876]: I1215 07:13:29.528886 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.528918 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.529084 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.529157 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.529249 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.595301 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630338 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630398 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdnsd\" (UniqueName: \"kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630472 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630498 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630577 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630598 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.630949 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.631543 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.635777 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " 
pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.637702 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.638474 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.639918 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.657729 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.658876 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdnsd\" (UniqueName: \"kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd\") pod \"cinder-api-0\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: W1215 07:13:29.664617 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7e97610_516d_4609_911c_53124ace7db0.slice/crio-66dd6fa899aeaeb83ada6d4a9fbf7fac24e229f085219e79458f7c009d6a5203 WatchSource:0}: Error finding container 66dd6fa899aeaeb83ada6d4a9fbf7fac24e229f085219e79458f7c009d6a5203: Status 404 returned error can't find the container with id 66dd6fa899aeaeb83ada6d4a9fbf7fac24e229f085219e79458f7c009d6a5203 Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.703668 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.803834 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:29 crc kubenswrapper[4876]: E1215 07:13:29.806447 4876 secret.go:188] Couldn't get secret openstack/barbican-worker-config-data: failed to sync secret cache: timed out waiting for the condition Dec 15 07:13:29 crc kubenswrapper[4876]: E1215 07:13:29.806504 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom podName:3b7ba44e-d87f-4b10-a601-eb425af47a70 nodeName:}" failed. No retries permitted until 2025-12-15 07:13:30.306486991 +0000 UTC m=+1335.877629902 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom") pod "barbican-worker-7bffd5564f-rlt89" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70") : failed to sync secret cache: timed out waiting for the condition Dec 15 07:13:29 crc kubenswrapper[4876]: I1215 07:13:29.934896 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.004815 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.035215 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.134407 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.260748 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:30 crc kubenswrapper[4876]: W1215 07:13:30.334592 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80da9abe_74d7_4e29_b459_ced2afb72ad8.slice/crio-bb10ef012767a99f1ee7ba1fac7be044c20b4ba799c6c4cd1aba50b85215152a WatchSource:0}: Error finding container bb10ef012767a99f1ee7ba1fac7be044c20b4ba799c6c4cd1aba50b85215152a: Status 404 returned error can't find the container with id bb10ef012767a99f1ee7ba1fac7be044c20b4ba799c6c4cd1aba50b85215152a Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.347131 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.351618 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") pod \"barbican-worker-7bffd5564f-rlt89\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.372300 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.564680 4876 generic.go:334] "Generic (PLEG): container finished" podID="3224887d-8b3b-4228-814d-53e35a24b517" containerID="435b67555b9c61685a3d9d0b17db2238a2413690482fe86306938f6d37e1d6fd" exitCode=0 Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.565074 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" event={"ID":"3224887d-8b3b-4228-814d-53e35a24b517","Type":"ContainerDied","Data":"435b67555b9c61685a3d9d0b17db2238a2413690482fe86306938f6d37e1d6fd"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.565831 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" event={"ID":"3224887d-8b3b-4228-814d-53e35a24b517","Type":"ContainerStarted","Data":"093151b45fdd74ae41d6e69182eede76d59b42e63e156f04e14a8be292885f92"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.567131 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerStarted","Data":"66dd6fa899aeaeb83ada6d4a9fbf7fac24e229f085219e79458f7c009d6a5203"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.569043 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerStarted","Data":"bb10ef012767a99f1ee7ba1fac7be044c20b4ba799c6c4cd1aba50b85215152a"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.590465 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerStarted","Data":"e3afdfafb40a4f676f4ba4fa8f421771ac82648e9de264f160e6bb0202630f82"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.604493 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerStarted","Data":"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.604551 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerStarted","Data":"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.604568 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerStarted","Data":"1a390fc0ad516c1456f81d2032ddcfe1b26df7679e83c7a62f5c6139688c7d96"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.604927 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.624176 4876 generic.go:334] "Generic (PLEG): container finished" podID="5c355492-4805-4b7c-b569-2fe383b4224e" containerID="b8202ca3886f5c8235ab5df587d411c3b87b513ad4d4099d6d97d66f7f79edf2" exitCode=0 Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.624220 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" 
event={"ID":"5c355492-4805-4b7c-b569-2fe383b4224e","Type":"ContainerDied","Data":"b8202ca3886f5c8235ab5df587d411c3b87b513ad4d4099d6d97d66f7f79edf2"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.624247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" event={"ID":"5c355492-4805-4b7c-b569-2fe383b4224e","Type":"ContainerStarted","Data":"7af403d281966f7bb4e6257784e29fd6435fd3eca63cffd5ca5af5e533b74d62"} Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.675870 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7c7556c956-5vfzz" podStartSLOduration=2.675844308 podStartE2EDuration="2.675844308s" podCreationTimestamp="2025-12-15 07:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:30.631450927 +0000 UTC m=+1336.202593858" watchObservedRunningTime="2025-12-15 07:13:30.675844308 +0000 UTC m=+1336.246987219" Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.934957 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:13:30 crc kubenswrapper[4876]: I1215 07:13:30.966624 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084487 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87qsm\" (UniqueName: \"kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084561 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084616 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084650 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084671 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.084699 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config\") pod \"5c355492-4805-4b7c-b569-2fe383b4224e\" (UID: \"5c355492-4805-4b7c-b569-2fe383b4224e\") " Dec 15 07:13:31 crc kubenswrapper[4876]: 
I1215 07:13:31.167861 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm" (OuterVolumeSpecName: "kube-api-access-87qsm") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "kube-api-access-87qsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.187555 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87qsm\" (UniqueName: \"kubernetes.io/projected/5c355492-4805-4b7c-b569-2fe383b4224e-kube-api-access-87qsm\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.188879 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.244737 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.282390 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config" (OuterVolumeSpecName: "config") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.289093 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.294660 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.294692 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.294701 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.294712 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.306070 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c355492-4805-4b7c-b569-2fe383b4224e" (UID: "5c355492-4805-4b7c-b569-2fe383b4224e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.316328 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.396571 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c355492-4805-4b7c-b569-2fe383b4224e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.646170 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerStarted","Data":"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c"} Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.649891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" event={"ID":"5c355492-4805-4b7c-b569-2fe383b4224e","Type":"ContainerDied","Data":"7af403d281966f7bb4e6257784e29fd6435fd3eca63cffd5ca5af5e533b74d62"} Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.649976 4876 scope.go:117] "RemoveContainer" containerID="b8202ca3886f5c8235ab5df587d411c3b87b513ad4d4099d6d97d66f7f79edf2" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.650268 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdf86f46f-p6jl4" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.662771 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" event={"ID":"3224887d-8b3b-4228-814d-53e35a24b517","Type":"ContainerStarted","Data":"f91bf6d15bf58d2343f264ef0f114bf34a71722864c55ea9bb590968e756e882"} Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.663427 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.669842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerStarted","Data":"6cf64ea9fa713416542750a45774c2028b76b5c52ae3b2b2bf1c9122e9bb5bc6"} Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.670160 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.697312 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" podStartSLOduration=2.697294695 podStartE2EDuration="2.697294695s" podCreationTimestamp="2025-12-15 07:13:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:31.683984554 +0000 UTC m=+1337.255127475" watchObservedRunningTime="2025-12-15 07:13:31.697294695 +0000 UTC m=+1337.268437606" Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.815684 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:31 crc kubenswrapper[4876]: I1215 07:13:31.823432 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bdf86f46f-p6jl4"] Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.565137 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.639831 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw7sl\" (UniqueName: \"kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.639921 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.639946 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.640007 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.640078 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.640151 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.640231 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd\") pod \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\" (UID: \"9e81fc70-c9d6-4196-8a2b-cc110e37e18e\") " Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.640992 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.641125 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.644095 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts" (OuterVolumeSpecName: "scripts") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.652264 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl" (OuterVolumeSpecName: "kube-api-access-qw7sl") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "kube-api-access-qw7sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.670179 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.724560 4876 generic.go:334] "Generic (PLEG): container finished" podID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerID="86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa" exitCode=0 Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.724690 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.727326 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c355492-4805-4b7c-b569-2fe383b4224e" path="/var/lib/kubelet/pods/5c355492-4805-4b7c-b569-2fe383b4224e/volumes" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.727968 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api-log" containerID="cri-o://b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" gracePeriod=30 Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.728373 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api" containerID="cri-o://bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" gracePeriod=30 Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.733820 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerDied","Data":"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa"} Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.733970 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e81fc70-c9d6-4196-8a2b-cc110e37e18e","Type":"ContainerDied","Data":"d992f38d28181770a3a0d0881ab514a5b1c47f0a4c9e403ed0882c1e0935141e"} Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.733992 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 
07:13:32.734033 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerStarted","Data":"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806"} Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.734052 4876 scope.go:117] "RemoveContainer" containerID="cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.736459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerStarted","Data":"28e46eff02a2f94409c6a4727739c8b0e424e22e3ef53c871f4dfe8df0b86dde"} Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.743402 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.743585 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.743747 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.743910 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw7sl\" (UniqueName: \"kubernetes.io/projected/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-kube-api-access-qw7sl\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.743967 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.765383 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.771426 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.771406922 podStartE2EDuration="3.771406922s" podCreationTimestamp="2025-12-15 07:13:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:32.766348399 +0000 UTC m=+1338.337491320" watchObservedRunningTime="2025-12-15 07:13:32.771406922 +0000 UTC m=+1338.342549833" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.784701 4876 scope.go:117] "RemoveContainer" containerID="dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.795582 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data" (OuterVolumeSpecName: "config-data") pod "9e81fc70-c9d6-4196-8a2b-cc110e37e18e" (UID: "9e81fc70-c9d6-4196-8a2b-cc110e37e18e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.846027 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:32 crc kubenswrapper[4876]: I1215 07:13:32.846054 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e81fc70-c9d6-4196-8a2b-cc110e37e18e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.120319 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.135217 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.143962 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.144905 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-notification-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.144925 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-notification-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.145070 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="sg-core" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145078 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="sg-core" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.145148 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="proxy-httpd" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145156 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="proxy-httpd" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.145164 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c355492-4805-4b7c-b569-2fe383b4224e" 
containerName="init" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145170 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c355492-4805-4b7c-b569-2fe383b4224e" containerName="init" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.145178 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-central-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145185 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-central-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145471 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="sg-core" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145487 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-notification-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145500 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="proxy-httpd" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145512 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" containerName="ceilometer-central-agent" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.145533 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c355492-4805-4b7c-b569-2fe383b4224e" containerName="init" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.151541 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.161714 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.162045 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.184171 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253030 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253065 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253174 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c5k9\" (UniqueName: \"kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253192 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253264 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253323 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.253372 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.354893 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c5k9\" (UniqueName: \"kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.354937 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.354985 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.355038 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.355090 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.355178 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.355194 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.355497 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.357051 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.361274 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.362650 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.374170 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.375008 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c5k9\" (UniqueName: \"kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.377020 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.422298 4876 scope.go:117] "RemoveContainer" containerID="86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.480279 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.577340 4876 scope.go:117] "RemoveContainer" containerID="0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.628552 4876 scope.go:117] "RemoveContainer" containerID="cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.629211 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f\": container with ID starting with cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f not found: ID does not exist" containerID="cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629243 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f"} err="failed to get container status \"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f\": rpc error: code = NotFound desc = could not find container \"cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f\": container with ID starting with cbe8c9619f0878f1f304c487b143a16c101cb7606c8625ceebbaf43d2238898f not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629267 4876 scope.go:117] "RemoveContainer" containerID="dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.629600 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573\": container with ID starting with dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573 not found: ID does not exist" containerID="dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629619 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573"} err="failed to get container status \"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573\": rpc error: code = NotFound desc = could not find container \"dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573\": container with ID starting with dc12e2106917a2d61b68ede5908619a20664b9bf9fc20a171784d23852dc0573 not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629633 4876 scope.go:117] "RemoveContainer" containerID="86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.629909 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa\": container with ID starting with 86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa not found: ID does not exist" containerID="86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629927 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa"} err="failed to get container status \"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa\": rpc error: code = NotFound desc = could not find container \"86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa\": container with ID starting with 86ff5848c9f30d247796408764caf4bfb5d7e581b0b6ee7ef07057b144b034fa not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.629943 4876 scope.go:117] "RemoveContainer" containerID="0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.630253 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265\": container with ID starting with 0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265 not found: ID does not exist" containerID="0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.630279 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265"} err="failed to get container status \"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265\": rpc error: code = NotFound desc = could not find container \"0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265\": container with ID starting with 0f83ae2336b9b3b4808572c966838df0bbc0da132918955f89991100656a6265 not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.749411 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerStarted","Data":"906e12a4e7207c08cbd8d84deaf12b6e44402bd7fad151f831fb9bbc8d378ea9"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.749784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerStarted","Data":"409c22ba1b246c89eacddbba5e405955d8477034e987e355d0b9e54624d39f46"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.753416 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerStarted","Data":"cd961a36fd04b1c02785e7dd50ada2e1ee1120caf0386cf15d358851e661646b"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.754515 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755141 4876 generic.go:334] "Generic (PLEG): container finished" podID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerID="bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" exitCode=0 Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755162 4876 generic.go:334] "Generic (PLEG): container finished" podID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerID="b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" exitCode=143 Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755188 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerDied","Data":"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755204 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerDied","Data":"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755215 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"80da9abe-74d7-4e29-b459-ced2afb72ad8","Type":"ContainerDied","Data":"bb10ef012767a99f1ee7ba1fac7be044c20b4ba799c6c4cd1aba50b85215152a"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.755229 4876 scope.go:117] "RemoveContainer" containerID="bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.758065 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerStarted","Data":"58b96bc4de0412068c3a21551c83c8384a5cf12b7209f50f771fb98adedf91d0"} Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.762228 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.762283 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.762336 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdnsd\" (UniqueName: \"kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.768210 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.768242 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.768352 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.768458 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data\") pod \"80da9abe-74d7-4e29-b459-ced2afb72ad8\" (UID: \"80da9abe-74d7-4e29-b459-ced2afb72ad8\") " Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.769864 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.771837 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" podStartSLOduration=2.8815274 podStartE2EDuration="5.77182103s" podCreationTimestamp="2025-12-15 07:13:28 +0000 UTC" firstStartedPulling="2025-12-15 07:13:29.667513359 +0000 UTC m=+1335.238656270" lastFinishedPulling="2025-12-15 07:13:32.557806989 +0000 UTC m=+1338.128949900" observedRunningTime="2025-12-15 07:13:33.770445587 +0000 UTC m=+1339.341588498" watchObservedRunningTime="2025-12-15 07:13:33.77182103 +0000 UTC m=+1339.342963941" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.772051 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs" (OuterVolumeSpecName: "logs") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.775370 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd" (OuterVolumeSpecName: "kube-api-access-rdnsd") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "kube-api-access-rdnsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.775479 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.796670 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts" (OuterVolumeSpecName: "scripts") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.821232 4876 scope.go:117] "RemoveContainer" containerID="b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.841512 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data" (OuterVolumeSpecName: "config-data") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.850898 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80da9abe-74d7-4e29-b459-ced2afb72ad8" (UID: "80da9abe-74d7-4e29-b459-ced2afb72ad8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.861629 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.943915722 podStartE2EDuration="4.861609146s" podCreationTimestamp="2025-12-15 07:13:29 +0000 UTC" firstStartedPulling="2025-12-15 07:13:30.048922478 +0000 UTC m=+1335.620065389" lastFinishedPulling="2025-12-15 07:13:30.966615902 +0000 UTC m=+1336.537758813" observedRunningTime="2025-12-15 07:13:33.832286248 +0000 UTC m=+1339.403429169" watchObservedRunningTime="2025-12-15 07:13:33.861609146 +0000 UTC m=+1339.432752047" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871246 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdnsd\" (UniqueName: \"kubernetes.io/projected/80da9abe-74d7-4e29-b459-ced2afb72ad8-kube-api-access-rdnsd\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871271 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80da9abe-74d7-4e29-b459-ced2afb72ad8-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871280 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871288 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/80da9abe-74d7-4e29-b459-ced2afb72ad8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871298 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871306 4876 
reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.871315 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/80da9abe-74d7-4e29-b459-ced2afb72ad8-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.904619 4876 scope.go:117] "RemoveContainer" containerID="bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.930737 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806\": container with ID starting with bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806 not found: ID does not exist" containerID="bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.930840 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806"} err="failed to get container status \"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806\": rpc error: code = NotFound desc = could not find container \"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806\": container with ID starting with bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806 not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.930927 4876 scope.go:117] "RemoveContainer" containerID="b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" Dec 15 07:13:33 crc kubenswrapper[4876]: E1215 07:13:33.931291 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c\": container with ID starting with b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c not found: ID does not exist" containerID="b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.931318 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c"} err="failed to get container status \"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c\": rpc error: code = NotFound desc = could not find container \"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c\": container with ID starting with b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.931334 4876 scope.go:117] "RemoveContainer" containerID="bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.964689 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806"} err="failed to get container status \"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806\": rpc error: code = NotFound desc = could not find container 
\"bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806\": container with ID starting with bb61eafc5a69b80860de6f50482875419d84141d269729dcd4d2e5cdbc5aa806 not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.964732 4876 scope.go:117] "RemoveContainer" containerID="b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.965700 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c"} err="failed to get container status \"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c\": rpc error: code = NotFound desc = could not find container \"b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c\": container with ID starting with b3556fb505eb2809ad28aa18a6f0338addde28cbba2998eeb1e09ad0199d249c not found: ID does not exist" Dec 15 07:13:33 crc kubenswrapper[4876]: I1215 07:13:33.994086 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.402484 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.715765 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e81fc70-c9d6-4196-8a2b-cc110e37e18e" path="/var/lib/kubelet/pods/9e81fc70-c9d6-4196-8a2b-cc110e37e18e/volumes" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.770835 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.774483 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerStarted","Data":"b5acde85d6363bf114c8858cae589a62ad1df91aa6ddc4e01ab89ae35270674b"} Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.776449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerStarted","Data":"5d1b2d9f1bf51ee65493ed30ecd5e5bab31cd11e0866d010e9194c3abf215bce"} Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.806577 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7bffd5564f-rlt89" podStartSLOduration=4.345584341 podStartE2EDuration="6.806556217s" podCreationTimestamp="2025-12-15 07:13:28 +0000 UTC" firstStartedPulling="2025-12-15 07:13:30.961309974 +0000 UTC m=+1336.532452875" lastFinishedPulling="2025-12-15 07:13:33.42228184 +0000 UTC m=+1338.993424751" observedRunningTime="2025-12-15 07:13:34.793163224 +0000 UTC m=+1340.364306155" watchObservedRunningTime="2025-12-15 07:13:34.806556217 +0000 UTC m=+1340.377699138" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.821243 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.829549 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.843286 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:34 crc kubenswrapper[4876]: E1215 07:13:34.843704 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.843731 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api" Dec 15 07:13:34 crc kubenswrapper[4876]: E1215 07:13:34.843777 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api-log" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.843785 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api-log" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.845784 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.845819 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" containerName="cinder-api-log" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.846676 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.849451 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.849596 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.850225 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.857225 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.900510 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.900876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901046 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901254 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901420 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901543 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzsm2\" (UniqueName: \"kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901650 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901770 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:34 crc kubenswrapper[4876]: I1215 07:13:34.901889 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.003994 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004132 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004188 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzsm2\" (UniqueName: \"kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004239 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004298 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004348 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004452 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004487 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004574 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.004843 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.005580 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.013017 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.014258 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.014658 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.015094 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.015226 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.024487 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.025655 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzsm2\" (UniqueName: \"kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2\") pod \"cinder-api-0\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.164522 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.485511 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.489326 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.494081 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.494370 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.509060 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.515791 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.515846 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.515905 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.516026 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 
07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.516060 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ghhv\" (UniqueName: \"kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.516232 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.516279 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619153 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619485 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ghhv\" (UniqueName: \"kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619585 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619633 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619651 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " 
pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.619680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.620099 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.626814 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.630357 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.631546 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.635707 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.639996 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.645171 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ghhv\" (UniqueName: \"kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv\") pod \"barbican-api-8774998dd-ps8vx\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.762973 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.784837 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerStarted","Data":"20f681bac97da50dac7df49648f99624d7612e3cbad3be4f3eb9482344be9566"} Dec 15 07:13:35 crc 
kubenswrapper[4876]: I1215 07:13:35.819391 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:35 crc kubenswrapper[4876]: I1215 07:13:35.860744 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.321143 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.728203 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80da9abe-74d7-4e29-b459-ced2afb72ad8" path="/var/lib/kubelet/pods/80da9abe-74d7-4e29-b459-ced2afb72ad8/volumes" Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.799481 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerStarted","Data":"4df3f9d2120bea7d34cfae05a4f5df18902f5f757c6e24aab3dce17f1b9c115c"} Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.799945 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerStarted","Data":"83d2138d13a0ffbdeaef69ef7b046714395ed24e1202d23adc8389cc650b351f"} Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.806373 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerStarted","Data":"13335ca52da2d503c57740b99d9daf374ffc5018a3326ec34ac01d5b52c7213e"} Dec 15 07:13:36 crc kubenswrapper[4876]: I1215 07:13:36.808574 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerStarted","Data":"6c20a3a734d34df3146fc3309ce72ef91b53ba753cbbd52442fd2949673c3227"} Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.821705 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerStarted","Data":"0adedfb29f59a78ed0ca95dedd5200ded03f6c4a9258dcf41996ef3271579c3d"} Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.823377 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.827172 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerStarted","Data":"092f4b9fcdd9016754928a6c90d75b9d3e80ddf38a92d06453459ca464a977ae"} Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.837705 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerStarted","Data":"ab2b7bcd77dc096c6f8bd4c255e23eb26198e47b55d43f5eb7d82a47546b3194"} Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.842154 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.842287 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.861038 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cinder-api-0" podStartSLOduration=3.861014707 podStartE2EDuration="3.861014707s" podCreationTimestamp="2025-12-15 07:13:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:37.843937925 +0000 UTC m=+1343.415080856" watchObservedRunningTime="2025-12-15 07:13:37.861014707 +0000 UTC m=+1343.432157648" Dec 15 07:13:37 crc kubenswrapper[4876]: I1215 07:13:37.883917 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-8774998dd-ps8vx" podStartSLOduration=2.883899439 podStartE2EDuration="2.883899439s" podCreationTimestamp="2025-12-15 07:13:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:37.868806134 +0000 UTC m=+1343.439949055" watchObservedRunningTime="2025-12-15 07:13:37.883899439 +0000 UTC m=+1343.455042350" Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.217249 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.306844 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.307083 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6f6fb4f468-c4zbx" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-api" containerID="cri-o://ed610484fe9156f9f3346af05c9736e7afab98e667a77ccff218365c06cb741a" gracePeriod=30 Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.307389 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6f6fb4f468-c4zbx" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-httpd" containerID="cri-o://5feb63996ef0003d1a572bf59e6097bdf6695e712ecbb2ad4f4a2e78e9088c8a" gracePeriod=30 Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.847388 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerStarted","Data":"136d151006468b3c38c1c557ed660b5d0976c92013ae49167e00b635fbbf1e7d"} Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.849082 4876 generic.go:334] "Generic (PLEG): container finished" podID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerID="5feb63996ef0003d1a572bf59e6097bdf6695e712ecbb2ad4f4a2e78e9088c8a" exitCode=0 Dec 15 07:13:38 crc kubenswrapper[4876]: I1215 07:13:38.849146 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerDied","Data":"5feb63996ef0003d1a572bf59e6097bdf6695e712ecbb2ad4f4a2e78e9088c8a"} Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.598367 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.612349 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.664588 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.664831 4876 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="dnsmasq-dns" containerID="cri-o://2fd2db1b9b1b6995a463ea1817502dd08769e677c2e02fb0e8f2fcd8d76c1cc3" gracePeriod=10 Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.678018 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.897845 4876 generic.go:334] "Generic (PLEG): container finished" podID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerID="2fd2db1b9b1b6995a463ea1817502dd08769e677c2e02fb0e8f2fcd8d76c1cc3" exitCode=0 Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.899020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" event={"ID":"ad8a1b74-0614-4fb4-8f83-989c5a8b475e","Type":"ContainerDied","Data":"2fd2db1b9b1b6995a463ea1817502dd08769e677c2e02fb0e8f2fcd8d76c1cc3"} Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.899255 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="cinder-scheduler" containerID="cri-o://28e46eff02a2f94409c6a4727739c8b0e424e22e3ef53c871f4dfe8df0b86dde" gracePeriod=30 Dec 15 07:13:39 crc kubenswrapper[4876]: I1215 07:13:39.899702 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="probe" containerID="cri-o://58b96bc4de0412068c3a21551c83c8384a5cf12b7209f50f771fb98adedf91d0" gracePeriod=30 Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.338981 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.441709 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb\") pod \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.442091 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb\") pod \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.442664 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc\") pod \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.442695 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lflh7\" (UniqueName: \"kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7\") pod \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.442735 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0\") pod 
\"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.442876 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config\") pod \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\" (UID: \"ad8a1b74-0614-4fb4-8f83-989c5a8b475e\") " Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.472057 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7" (OuterVolumeSpecName: "kube-api-access-lflh7") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "kube-api-access-lflh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.545919 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lflh7\" (UniqueName: \"kubernetes.io/projected/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-kube-api-access-lflh7\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.590875 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config" (OuterVolumeSpecName: "config") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.614144 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.621754 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.632563 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.640508 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ad8a1b74-0614-4fb4-8f83-989c5a8b475e" (UID: "ad8a1b74-0614-4fb4-8f83-989c5a8b475e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.647362 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.647395 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.647406 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.647415 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.647424 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ad8a1b74-0614-4fb4-8f83-989c5a8b475e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.907752 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerStarted","Data":"d397fd101dfd610bcb8ff21143d208d1977d9f0ebdb8dfa8c8efc8672784faa7"} Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.908888 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.910552 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" event={"ID":"ad8a1b74-0614-4fb4-8f83-989c5a8b475e","Type":"ContainerDied","Data":"35e1ca7791d4598ab0b00970656d9a8bef977b4cacebf985b4b7aeb64123576e"} Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.910582 4876 scope.go:117] "RemoveContainer" containerID="2fd2db1b9b1b6995a463ea1817502dd08769e677c2e02fb0e8f2fcd8d76c1cc3" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.910676 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b9c8b59c-8kw59" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.929064 4876 scope.go:117] "RemoveContainer" containerID="65c96db663a1d570b833d012a7e747aecfa41909ee101b29781a46429cfe9dca" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.936676 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.817254214 podStartE2EDuration="7.936659817s" podCreationTimestamp="2025-12-15 07:13:33 +0000 UTC" firstStartedPulling="2025-12-15 07:13:34.011221684 +0000 UTC m=+1339.582364595" lastFinishedPulling="2025-12-15 07:13:40.130627287 +0000 UTC m=+1345.701770198" observedRunningTime="2025-12-15 07:13:40.933173903 +0000 UTC m=+1346.504316814" watchObservedRunningTime="2025-12-15 07:13:40.936659817 +0000 UTC m=+1346.507802728" Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.961271 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:40 crc kubenswrapper[4876]: I1215 07:13:40.968862 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b9c8b59c-8kw59"] Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.120580 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.185935 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.392823 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.536850 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.929533 4876 generic.go:334] "Generic (PLEG): container finished" podID="21448337-e8ae-4c1b-8035-c688909333e6" containerID="58b96bc4de0412068c3a21551c83c8384a5cf12b7209f50f771fb98adedf91d0" exitCode=0 Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.929591 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerDied","Data":"58b96bc4de0412068c3a21551c83c8384a5cf12b7209f50f771fb98adedf91d0"} Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.944138 4876 generic.go:334] "Generic (PLEG): container finished" podID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerID="ed610484fe9156f9f3346af05c9736e7afab98e667a77ccff218365c06cb741a" exitCode=0 Dec 15 07:13:41 crc kubenswrapper[4876]: I1215 07:13:41.944254 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerDied","Data":"ed610484fe9156f9f3346af05c9736e7afab98e667a77ccff218365c06cb741a"} Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.235376 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.378846 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.378985 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.379022 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57kwg\" (UniqueName: \"kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.379097 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.379153 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.385982 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.404378 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg" (OuterVolumeSpecName: "kube-api-access-57kwg") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf"). InnerVolumeSpecName "kube-api-access-57kwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.444566 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:42 crc kubenswrapper[4876]: E1215 07:13:42.474448 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config podName:f0d3517e-4f36-4dfb-a18a-73b63620b6cf nodeName:}" failed. No retries permitted until 2025-12-15 07:13:42.974424286 +0000 UTC m=+1348.545567197 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config" (UniqueName: "kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf") : error deleting /var/lib/kubelet/pods/f0d3517e-4f36-4dfb-a18a-73b63620b6cf/volume-subpaths: remove /var/lib/kubelet/pods/f0d3517e-4f36-4dfb-a18a-73b63620b6cf/volume-subpaths: no such file or directory Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.478264 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.480669 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.480696 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57kwg\" (UniqueName: \"kubernetes.io/projected/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-kube-api-access-57kwg\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.480706 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.480715 4876 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.722078 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" path="/var/lib/kubelet/pods/ad8a1b74-0614-4fb4-8f83-989c5a8b475e/volumes" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.956942 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6f6fb4f468-c4zbx" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.956938 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6f6fb4f468-c4zbx" event={"ID":"f0d3517e-4f36-4dfb-a18a-73b63620b6cf","Type":"ContainerDied","Data":"471920c2286d867a03feb613896f167c821b6180fe39ce39d329eb6d602b57d7"} Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.957013 4876 scope.go:117] "RemoveContainer" containerID="5feb63996ef0003d1a572bf59e6097bdf6695e712ecbb2ad4f4a2e78e9088c8a" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.984087 4876 scope.go:117] "RemoveContainer" containerID="ed610484fe9156f9f3346af05c9736e7afab98e667a77ccff218365c06cb741a" Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.989084 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") pod \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\" (UID: \"f0d3517e-4f36-4dfb-a18a-73b63620b6cf\") " Dec 15 07:13:42 crc kubenswrapper[4876]: I1215 07:13:42.995249 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config" (OuterVolumeSpecName: "config") pod "f0d3517e-4f36-4dfb-a18a-73b63620b6cf" (UID: "f0d3517e-4f36-4dfb-a18a-73b63620b6cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:43 crc kubenswrapper[4876]: I1215 07:13:43.093196 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0d3517e-4f36-4dfb-a18a-73b63620b6cf-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:43 crc kubenswrapper[4876]: I1215 07:13:43.292079 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:43 crc kubenswrapper[4876]: I1215 07:13:43.300963 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6f6fb4f468-c4zbx"] Dec 15 07:13:43 crc kubenswrapper[4876]: I1215 07:13:43.974161 4876 generic.go:334] "Generic (PLEG): container finished" podID="21448337-e8ae-4c1b-8035-c688909333e6" containerID="28e46eff02a2f94409c6a4727739c8b0e424e22e3ef53c871f4dfe8df0b86dde" exitCode=0 Dec 15 07:13:43 crc kubenswrapper[4876]: I1215 07:13:43.974240 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerDied","Data":"28e46eff02a2f94409c6a4727739c8b0e424e22e3ef53c871f4dfe8df0b86dde"} Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.320018 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.417780 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.417891 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.417942 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk9b6\" (UniqueName: \"kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.417991 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.418122 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.418143 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle\") pod \"21448337-e8ae-4c1b-8035-c688909333e6\" (UID: \"21448337-e8ae-4c1b-8035-c688909333e6\") " Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.426325 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.472174 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts" (OuterVolumeSpecName: "scripts") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.472206 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6" (OuterVolumeSpecName: "kube-api-access-nk9b6") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "kube-api-access-nk9b6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.476306 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.521172 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.521206 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.521216 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk9b6\" (UniqueName: \"kubernetes.io/projected/21448337-e8ae-4c1b-8035-c688909333e6-kube-api-access-nk9b6\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.521227 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/21448337-e8ae-4c1b-8035-c688909333e6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.531152 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.576460 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data" (OuterVolumeSpecName: "config-data") pod "21448337-e8ae-4c1b-8035-c688909333e6" (UID: "21448337-e8ae-4c1b-8035-c688909333e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.623384 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.623427 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21448337-e8ae-4c1b-8035-c688909333e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.720305 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" path="/var/lib/kubelet/pods/f0d3517e-4f36-4dfb-a18a-73b63620b6cf/volumes" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.907247 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.989556 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"21448337-e8ae-4c1b-8035-c688909333e6","Type":"ContainerDied","Data":"e3afdfafb40a4f676f4ba4fa8f421771ac82648e9de264f160e6bb0202630f82"} Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.989616 4876 scope.go:117] "RemoveContainer" containerID="58b96bc4de0412068c3a21551c83c8384a5cf12b7209f50f771fb98adedf91d0" Dec 15 07:13:44 crc kubenswrapper[4876]: I1215 07:13:44.989776 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.033023 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.040403 4876 scope.go:117] "RemoveContainer" containerID="28e46eff02a2f94409c6a4727739c8b0e424e22e3ef53c871f4dfe8df0b86dde" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.054394 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064397 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064733 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="cinder-scheduler" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064753 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="cinder-scheduler" Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064770 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-httpd" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064777 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-httpd" Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064793 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="init" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064800 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="init" Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064810 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="dnsmasq-dns" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064817 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="dnsmasq-dns" Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064829 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-api" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064835 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-api" Dec 15 07:13:45 crc kubenswrapper[4876]: E1215 07:13:45.064848 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="probe" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.064856 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="probe" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.065025 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad8a1b74-0614-4fb4-8f83-989c5a8b475e" containerName="dnsmasq-dns" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.065043 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="cinder-scheduler" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.065065 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-httpd" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.065078 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="21448337-e8ae-4c1b-8035-c688909333e6" containerName="probe" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.065088 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0d3517e-4f36-4dfb-a18a-73b63620b6cf" containerName="neutron-api" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.066289 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.071471 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.094161 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140223 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140288 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdnqn\" (UniqueName: \"kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140349 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140373 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140410 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.140440 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243128 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdnqn\" (UniqueName: \"kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243216 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243241 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243278 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243307 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243347 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.243449 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.250078 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.251214 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.253772 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.260013 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.275536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdnqn\" (UniqueName: \"kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn\") pod \"cinder-scheduler-0\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " pod="openstack/cinder-scheduler-0" Dec 
15 07:13:45 crc kubenswrapper[4876]: I1215 07:13:45.401569 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:13:46 crc kubenswrapper[4876]: I1215 07:13:46.068230 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:13:46 crc kubenswrapper[4876]: I1215 07:13:46.719253 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21448337-e8ae-4c1b-8035-c688909333e6" path="/var/lib/kubelet/pods/21448337-e8ae-4c1b-8035-c688909333e6/volumes" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.032315 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerStarted","Data":"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783"} Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.032676 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerStarted","Data":"5dce8b66da2c593d37c1adf1f95b57024e34e70055c7bf421ab38aaa9fe488ea"} Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.073776 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.075992 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.082475 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.082746 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wjfnb" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.082864 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.108408 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.183505 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.183565 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9smdv\" (UniqueName: \"kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.183698 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.183793 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.285701 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.285803 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.285833 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.285854 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9smdv\" (UniqueName: \"kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.290959 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.306868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.318834 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.330092 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9smdv\" (UniqueName: \"kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv\") pod \"openstackclient\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " pod="openstack/openstackclient" Dec 15 07:13:47 crc kubenswrapper[4876]: I1215 07:13:47.405259 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:47.991772 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.043358 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerStarted","Data":"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91"} Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.044301 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e3ade031-559a-4a09-b707-ab081a659fc9","Type":"ContainerStarted","Data":"fc1b9920e6419316e2b6e6ab378929df2e9d4533ac0501e9d14d9c42d0e1464f"} Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.065251 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.065236119 podStartE2EDuration="3.065236119s" podCreationTimestamp="2025-12-15 07:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:13:48.05989576 +0000 UTC m=+1353.631038671" watchObservedRunningTime="2025-12-15 07:13:48.065236119 +0000 UTC m=+1353.636379030" Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.262985 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.293609 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.355847 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.356313 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c7556c956-5vfzz" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api-log" containerID="cri-o://2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf" gracePeriod=30 Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.356851 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7c7556c956-5vfzz" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api" containerID="cri-o://8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2" gracePeriod=30 Dec 15 07:13:48 crc kubenswrapper[4876]: I1215 07:13:48.774208 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 15 07:13:49 crc kubenswrapper[4876]: I1215 07:13:49.078702 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerID="2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf" exitCode=143 Dec 15 07:13:49 crc kubenswrapper[4876]: I1215 07:13:49.078801 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerDied","Data":"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf"} Dec 15 07:13:50 crc kubenswrapper[4876]: I1215 07:13:50.401744 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 15 
07:13:51 crc kubenswrapper[4876]: I1215 07:13:51.536245 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c7556c956-5vfzz" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:43394->10.217.0.156:9311: read: connection reset by peer" Dec 15 07:13:51 crc kubenswrapper[4876]: I1215 07:13:51.536322 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7c7556c956-5vfzz" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:43404->10.217.0.156:9311: read: connection reset by peer" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.009675 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.111269 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerID="8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2" exitCode=0 Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.111386 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c7556c956-5vfzz" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.111313 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerDied","Data":"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2"} Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.111866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c7556c956-5vfzz" event={"ID":"b8fe802b-72f8-4fb3-b164-910e30a831e0","Type":"ContainerDied","Data":"1a390fc0ad516c1456f81d2032ddcfe1b26df7679e83c7a62f5c6139688c7d96"} Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.111887 4876 scope.go:117] "RemoveContainer" containerID="8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.122743 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom\") pod \"b8fe802b-72f8-4fb3-b164-910e30a831e0\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.122792 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brslh\" (UniqueName: \"kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh\") pod \"b8fe802b-72f8-4fb3-b164-910e30a831e0\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.122858 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs\") pod \"b8fe802b-72f8-4fb3-b164-910e30a831e0\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.122914 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data\") pod 
\"b8fe802b-72f8-4fb3-b164-910e30a831e0\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.123006 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle\") pod \"b8fe802b-72f8-4fb3-b164-910e30a831e0\" (UID: \"b8fe802b-72f8-4fb3-b164-910e30a831e0\") " Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.124889 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs" (OuterVolumeSpecName: "logs") pod "b8fe802b-72f8-4fb3-b164-910e30a831e0" (UID: "b8fe802b-72f8-4fb3-b164-910e30a831e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.131844 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh" (OuterVolumeSpecName: "kube-api-access-brslh") pod "b8fe802b-72f8-4fb3-b164-910e30a831e0" (UID: "b8fe802b-72f8-4fb3-b164-910e30a831e0"). InnerVolumeSpecName "kube-api-access-brslh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.133445 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8fe802b-72f8-4fb3-b164-910e30a831e0" (UID: "b8fe802b-72f8-4fb3-b164-910e30a831e0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.139609 4876 scope.go:117] "RemoveContainer" containerID="2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.168497 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8fe802b-72f8-4fb3-b164-910e30a831e0" (UID: "b8fe802b-72f8-4fb3-b164-910e30a831e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.176480 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data" (OuterVolumeSpecName: "config-data") pod "b8fe802b-72f8-4fb3-b164-910e30a831e0" (UID: "b8fe802b-72f8-4fb3-b164-910e30a831e0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.224973 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.225193 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.225251 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8fe802b-72f8-4fb3-b164-910e30a831e0-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.225322 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brslh\" (UniqueName: \"kubernetes.io/projected/b8fe802b-72f8-4fb3-b164-910e30a831e0-kube-api-access-brslh\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.225383 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8fe802b-72f8-4fb3-b164-910e30a831e0-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.237995 4876 scope.go:117] "RemoveContainer" containerID="8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2" Dec 15 07:13:52 crc kubenswrapper[4876]: E1215 07:13:52.238422 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2\": container with ID starting with 8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2 not found: ID does not exist" containerID="8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.238547 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2"} err="failed to get container status \"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2\": rpc error: code = NotFound desc = could not find container \"8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2\": container with ID starting with 8f5cc19ad16bde26b6fcff066351e643a30eab3c73b15dc60a16bd8fa05d7dc2 not found: ID does not exist" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.238629 4876 scope.go:117] "RemoveContainer" containerID="2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf" Dec 15 07:13:52 crc kubenswrapper[4876]: E1215 07:13:52.238992 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf\": container with ID starting with 2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf not found: ID does not exist" containerID="2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.239068 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf"} err="failed to get container status 
\"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf\": rpc error: code = NotFound desc = could not find container \"2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf\": container with ID starting with 2ce36302ede2d8a052d78ea6700b97c9d5f40e7d1ce166fef1335cf5ab03e6bf not found: ID does not exist" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.449237 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.466159 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7c7556c956-5vfzz"] Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.551727 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:13:52 crc kubenswrapper[4876]: E1215 07:13:52.552066 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.552079 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api" Dec 15 07:13:52 crc kubenswrapper[4876]: E1215 07:13:52.552114 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api-log" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.552120 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api-log" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.552277 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.552292 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" containerName="barbican-api-log" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.553136 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.556312 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.556679 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.556684 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.572684 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633422 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633508 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633636 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633656 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633716 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whp4q\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633783 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633868 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " 
pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.633892 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.718041 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8fe802b-72f8-4fb3-b164-910e30a831e0" path="/var/lib/kubelet/pods/b8fe802b-72f8-4fb3-b164-910e30a831e0/volumes" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735076 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735145 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735203 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whp4q\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735251 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735310 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735342 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735399 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.735441 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.736034 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.736090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.742987 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.753043 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.753208 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.753435 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.754209 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.765870 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whp4q\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q\") pod \"swift-proxy-7657c647b5-xksrd\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:52 crc kubenswrapper[4876]: I1215 07:13:52.868529 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.248799 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.249478 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="sg-core" containerID="cri-o://136d151006468b3c38c1c557ed660b5d0976c92013ae49167e00b635fbbf1e7d" gracePeriod=30 Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.249569 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-notification-agent" containerID="cri-o://092f4b9fcdd9016754928a6c90d75b9d3e80ddf38a92d06453459ca464a977ae" gracePeriod=30 Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.249625 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="proxy-httpd" containerID="cri-o://d397fd101dfd610bcb8ff21143d208d1977d9f0ebdb8dfa8c8efc8672784faa7" gracePeriod=30 Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.249572 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-central-agent" containerID="cri-o://6c20a3a734d34df3146fc3309ce72ef91b53ba753cbbd52442fd2949673c3227" gracePeriod=30 Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.258902 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.160:3000/\": EOF" Dec 15 07:13:53 crc kubenswrapper[4876]: I1215 07:13:53.479943 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137046 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerID="d397fd101dfd610bcb8ff21143d208d1977d9f0ebdb8dfa8c8efc8672784faa7" exitCode=0 Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137082 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerID="136d151006468b3c38c1c557ed660b5d0976c92013ae49167e00b635fbbf1e7d" exitCode=2 Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137093 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerID="6c20a3a734d34df3146fc3309ce72ef91b53ba753cbbd52442fd2949673c3227" exitCode=0 Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137136 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerDied","Data":"d397fd101dfd610bcb8ff21143d208d1977d9f0ebdb8dfa8c8efc8672784faa7"} Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137171 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerDied","Data":"136d151006468b3c38c1c557ed660b5d0976c92013ae49167e00b635fbbf1e7d"} Dec 15 07:13:54 crc kubenswrapper[4876]: I1215 07:13:54.137187 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerDied","Data":"6c20a3a734d34df3146fc3309ce72ef91b53ba753cbbd52442fd2949673c3227"} Dec 15 07:13:55 crc kubenswrapper[4876]: I1215 07:13:55.655056 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 15 07:13:56 crc kubenswrapper[4876]: I1215 07:13:56.158816 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerID="092f4b9fcdd9016754928a6c90d75b9d3e80ddf38a92d06453459ca464a977ae" exitCode=0 Dec 15 07:13:56 crc kubenswrapper[4876]: I1215 07:13:56.158888 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerDied","Data":"092f4b9fcdd9016754928a6c90d75b9d3e80ddf38a92d06453459ca464a977ae"} Dec 15 07:13:59 crc kubenswrapper[4876]: W1215 07:13:59.355344 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod769bb0f8_96fe_485a_adfd_e51747bbff86.slice/crio-ab9e566345df24e380959f893a007c698b900501f2115b87085d4c1e4ece3b84 WatchSource:0}: Error finding container ab9e566345df24e380959f893a007c698b900501f2115b87085d4c1e4ece3b84: Status 404 returned error can't find the container with id ab9e566345df24e380959f893a007c698b900501f2115b87085d4c1e4ece3b84 Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.682145 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.772595 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4c5k9\" (UniqueName: \"kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.772742 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.772818 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.773011 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.773072 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.773193 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.773242 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts\") pod \"2a758edf-da39-4bb9-8e76-7fde55c93bda\" (UID: \"2a758edf-da39-4bb9-8e76-7fde55c93bda\") " Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.773337 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.774056 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.774822 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.777202 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9" (OuterVolumeSpecName: "kube-api-access-4c5k9") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "kube-api-access-4c5k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.777833 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts" (OuterVolumeSpecName: "scripts") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.805243 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.859238 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.875703 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.875739 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.875751 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a758edf-da39-4bb9-8e76-7fde55c93bda-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.875766 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.875781 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4c5k9\" (UniqueName: \"kubernetes.io/projected/2a758edf-da39-4bb9-8e76-7fde55c93bda-kube-api-access-4c5k9\") on node \"crc\" DevicePath \"\"" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.891223 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data" (OuterVolumeSpecName: "config-data") pod "2a758edf-da39-4bb9-8e76-7fde55c93bda" (UID: "2a758edf-da39-4bb9-8e76-7fde55c93bda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:13:59 crc kubenswrapper[4876]: I1215 07:13:59.977483 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a758edf-da39-4bb9-8e76-7fde55c93bda-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.198697 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e3ade031-559a-4a09-b707-ab081a659fc9","Type":"ContainerStarted","Data":"5984af4749e3e3b071be6cbab85aa2dc4f95eb610ca9d34223b709f90b67f795"} Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.200878 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerStarted","Data":"39f90703ccba40f7e2a435957a70f4bff9580120e427f263883cc1f2fc48d1e8"} Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.200918 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerStarted","Data":"476fd12dd3cb5e2c27a25b26e39e28f8bb6e1c10090223c4ed86166489cea477"} Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.200929 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerStarted","Data":"ab9e566345df24e380959f893a007c698b900501f2115b87085d4c1e4ece3b84"} Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.201029 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 
07:14:00.201059 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.204328 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a758edf-da39-4bb9-8e76-7fde55c93bda","Type":"ContainerDied","Data":"b5acde85d6363bf114c8858cae589a62ad1df91aa6ddc4e01ab89ae35270674b"} Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.204375 4876 scope.go:117] "RemoveContainer" containerID="d397fd101dfd610bcb8ff21143d208d1977d9f0ebdb8dfa8c8efc8672784faa7" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.204528 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.222221 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.768926743 podStartE2EDuration="13.222206298s" podCreationTimestamp="2025-12-15 07:13:47 +0000 UTC" firstStartedPulling="2025-12-15 07:13:48.00226559 +0000 UTC m=+1353.573408501" lastFinishedPulling="2025-12-15 07:13:59.455545145 +0000 UTC m=+1365.026688056" observedRunningTime="2025-12-15 07:14:00.219357643 +0000 UTC m=+1365.790500574" watchObservedRunningTime="2025-12-15 07:14:00.222206298 +0000 UTC m=+1365.793349219" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.239222 4876 scope.go:117] "RemoveContainer" containerID="136d151006468b3c38c1c557ed660b5d0976c92013ae49167e00b635fbbf1e7d" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.266891 4876 scope.go:117] "RemoveContainer" containerID="092f4b9fcdd9016754928a6c90d75b9d3e80ddf38a92d06453459ca464a977ae" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.267902 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7657c647b5-xksrd" podStartSLOduration=8.267879669 podStartE2EDuration="8.267879669s" podCreationTimestamp="2025-12-15 07:13:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:00.258451269 +0000 UTC m=+1365.829594190" watchObservedRunningTime="2025-12-15 07:14:00.267879669 +0000 UTC m=+1365.839022580" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.294521 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.313160 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.315496 4876 scope.go:117] "RemoveContainer" containerID="6c20a3a734d34df3146fc3309ce72ef91b53ba753cbbd52442fd2949673c3227" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327147 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:00 crc kubenswrapper[4876]: E1215 07:14:00.327499 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-central-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327515 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-central-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: E1215 07:14:00.327530 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" 
containerName="ceilometer-notification-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327535 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-notification-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: E1215 07:14:00.327560 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="proxy-httpd" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327566 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="proxy-httpd" Dec 15 07:14:00 crc kubenswrapper[4876]: E1215 07:14:00.327576 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="sg-core" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327582 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="sg-core" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327744 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="proxy-httpd" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327758 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-notification-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327768 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="ceilometer-central-agent" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.327784 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" containerName="sg-core" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.329315 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.331908 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.332402 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.336881 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384459 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q25c\" (UniqueName: \"kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384505 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384536 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384570 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384624 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384667 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.384725 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486178 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486246 
4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q25c\" (UniqueName: \"kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486273 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486297 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486331 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486386 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.486429 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.487161 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.487475 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.491883 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.492429 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.492582 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.493301 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.502475 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q25c\" (UniqueName: \"kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c\") pod \"ceilometer-0\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " pod="openstack/ceilometer-0" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.715919 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a758edf-da39-4bb9-8e76-7fde55c93bda" path="/var/lib/kubelet/pods/2a758edf-da39-4bb9-8e76-7fde55c93bda/volumes" Dec 15 07:14:00 crc kubenswrapper[4876]: I1215 07:14:00.718137 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:01 crc kubenswrapper[4876]: I1215 07:14:01.349974 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:01 crc kubenswrapper[4876]: W1215 07:14:01.365236 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff1a4dc8_dce2_44fd_a9f1_7bec30b92a9d.slice/crio-fda211ec451e132ab210e68b01d4b186fc84ed2817a84e88d2616a336c338d76 WatchSource:0}: Error finding container fda211ec451e132ab210e68b01d4b186fc84ed2817a84e88d2616a336c338d76: Status 404 returned error can't find the container with id fda211ec451e132ab210e68b01d4b186fc84ed2817a84e88d2616a336c338d76 Dec 15 07:14:01 crc kubenswrapper[4876]: I1215 07:14:01.419516 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:01 crc kubenswrapper[4876]: I1215 07:14:01.419971 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-log" containerID="cri-o://5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b" gracePeriod=30 Dec 15 07:14:01 crc kubenswrapper[4876]: I1215 07:14:01.420083 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-httpd" containerID="cri-o://64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a" gracePeriod=30 Dec 15 07:14:01 crc kubenswrapper[4876]: I1215 07:14:01.763464 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:02 crc kubenswrapper[4876]: I1215 07:14:02.236975 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerStarted","Data":"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d"} Dec 15 07:14:02 crc kubenswrapper[4876]: I1215 07:14:02.237768 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerStarted","Data":"fda211ec451e132ab210e68b01d4b186fc84ed2817a84e88d2616a336c338d76"} Dec 15 07:14:02 crc kubenswrapper[4876]: I1215 07:14:02.239938 4876 generic.go:334] "Generic (PLEG): container finished" podID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerID="5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b" exitCode=143 Dec 15 07:14:02 crc kubenswrapper[4876]: I1215 07:14:02.240026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerDied","Data":"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b"} Dec 15 07:14:03 crc kubenswrapper[4876]: I1215 07:14:03.252679 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerStarted","Data":"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b"} Dec 15 07:14:04 crc kubenswrapper[4876]: I1215 07:14:04.274170 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerStarted","Data":"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225"} Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.194436 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.287347 4876 generic.go:334] "Generic (PLEG): container finished" podID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerID="64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a" exitCode=0 Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.287388 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerDied","Data":"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a"} Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.287417 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0","Type":"ContainerDied","Data":"6e44b3a71e7d63692d6279cee0b8cc2ce7fad37b09a07d1d7f9930ab1f5fc512"} Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.287438 4876 scope.go:117] "RemoveContainer" containerID="64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.287643 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.315402 4876 scope.go:117] "RemoveContainer" containerID="5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.340973 4876 scope.go:117] "RemoveContainer" containerID="64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a" Dec 15 07:14:05 crc kubenswrapper[4876]: E1215 07:14:05.341332 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a\": container with ID starting with 64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a not found: ID does not exist" containerID="64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.341361 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a"} err="failed to get container status \"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a\": rpc error: code = NotFound desc = could not find container \"64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a\": container with ID starting with 64f54a93ed354a47d4a3dc8a92bb258bfa53df989adc9347aa0433a93c6bb89a not found: ID does not exist" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.341379 4876 scope.go:117] "RemoveContainer" containerID="5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b" Dec 15 07:14:05 crc kubenswrapper[4876]: E1215 07:14:05.341581 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b\": container with ID starting with 5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b not found: ID does not exist" containerID="5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.341596 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b"} err="failed to get container status \"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b\": rpc error: code = NotFound desc = could not find container \"5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b\": container with ID starting with 5f9efb7f77373ce6a9bf8e7771644d83afd26659202f761c1becdc39625d915b not found: ID does not exist" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.390307 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.390574 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.390677 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-ndncb\" (UniqueName: \"kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.390864 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.390951 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.391018 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.391120 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.391184 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs\") pod \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\" (UID: \"ba07dd8d-d3e0-4981-a0d4-1339fdb451c0\") " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.391261 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.391695 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.392082 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs" (OuterVolumeSpecName: "logs") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.397410 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.398292 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb" (OuterVolumeSpecName: "kube-api-access-ndncb") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "kube-api-access-ndncb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.402214 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts" (OuterVolumeSpecName: "scripts") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.431224 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.446176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data" (OuterVolumeSpecName: "config-data") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.451446 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" (UID: "ba07dd8d-d3e0-4981-a0d4-1339fdb451c0"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494364 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494423 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494434 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494449 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494479 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494508 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.494522 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndncb\" (UniqueName: \"kubernetes.io/projected/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0-kube-api-access-ndncb\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.518849 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.597275 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.619548 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.639516 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.647820 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:05 crc kubenswrapper[4876]: E1215 07:14:05.648308 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-log" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.648328 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-log" Dec 15 07:14:05 crc kubenswrapper[4876]: E1215 07:14:05.648342 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-httpd" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.648349 4876 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-httpd" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.648516 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-httpd" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.648545 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" containerName="glance-log" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.649521 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.651677 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.652024 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.656550 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800393 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800523 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800572 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800668 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800736 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800776 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " 
pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.800813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.801019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dhnx\" (UniqueName: \"kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902445 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dhnx\" (UniqueName: \"kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902499 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902551 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902582 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902621 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902645 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902666 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " 
pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.902689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.903846 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.903868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.907412 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.909679 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.911231 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.912276 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.913740 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.929004 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dhnx\" (UniqueName: \"kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 
07:14:05.946563 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " pod="openstack/glance-default-internal-api-0" Dec 15 07:14:05 crc kubenswrapper[4876]: I1215 07:14:05.966529 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.298856 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerStarted","Data":"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c"} Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.299212 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.299054 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-central-agent" containerID="cri-o://827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d" gracePeriod=30 Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.299047 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="proxy-httpd" containerID="cri-o://c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c" gracePeriod=30 Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.299136 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-notification-agent" containerID="cri-o://51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b" gracePeriod=30 Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.299097 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="sg-core" containerID="cri-o://de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225" gracePeriod=30 Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.321564 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.6372994910000003 podStartE2EDuration="6.321543199s" podCreationTimestamp="2025-12-15 07:14:00 +0000 UTC" firstStartedPulling="2025-12-15 07:14:01.367878075 +0000 UTC m=+1366.939020986" lastFinishedPulling="2025-12-15 07:14:05.052121783 +0000 UTC m=+1370.623264694" observedRunningTime="2025-12-15 07:14:06.318433535 +0000 UTC m=+1371.889576456" watchObservedRunningTime="2025-12-15 07:14:06.321543199 +0000 UTC m=+1371.892686130" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.562269 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-2ht2h"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.563365 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.586833 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.614235 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2ht2h"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.717735 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6b6b\" (UniqueName: \"kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.717797 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.734609 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba07dd8d-d3e0-4981-a0d4-1339fdb451c0" path="/var/lib/kubelet/pods/ba07dd8d-d3e0-4981-a0d4-1339fdb451c0/volumes" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.735405 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-cfvxh"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.736832 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.766084 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-c196-account-create-update-629fc"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.768943 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.772442 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.778750 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cfvxh"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.812970 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c196-account-create-update-629fc"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.823097 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6b6b\" (UniqueName: \"kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.823170 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.827422 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.869121 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6b6b\" (UniqueName: \"kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b\") pod \"nova-api-db-create-2ht2h\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.924515 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.929165 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pflfm\" (UniqueName: \"kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.929282 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.929374 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zzvp\" (UniqueName: \"kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp\") pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.929400 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts\") pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.938241 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-pkjmb"] Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.939334 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:06 crc kubenswrapper[4876]: I1215 07:14:06.980608 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pkjmb"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.031181 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d7a8-account-create-update-9kn7n"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.032558 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.036278 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d7a8-account-create-update-9kn7n"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.039701 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pflfm\" (UniqueName: \"kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042284 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbljz\" (UniqueName: \"kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042308 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042845 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zzvp\" (UniqueName: \"kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp\") pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042867 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts\") pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.042883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.052469 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.053335 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts\") 
pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.082005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zzvp\" (UniqueName: \"kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp\") pod \"nova-cell0-db-create-cfvxh\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.090523 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pflfm\" (UniqueName: \"kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm\") pod \"nova-api-c196-account-create-update-629fc\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.146687 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.154226 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpclg\" (UniqueName: \"kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.154328 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbljz\" (UniqueName: \"kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.154376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.154425 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.155060 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.178657 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbljz\" (UniqueName: \"kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz\") pod \"nova-cell1-db-create-pkjmb\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " 
pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.199979 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3e2f-account-create-update-w9xkp"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.201716 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.203801 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.241485 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3e2f-account-create-update-w9xkp"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.257210 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpclg\" (UniqueName: \"kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.257350 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdf2g\" (UniqueName: \"kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.257381 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.257433 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.258758 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.287053 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpclg\" (UniqueName: \"kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg\") pod \"nova-cell0-d7a8-account-create-update-9kn7n\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.296964 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.321433 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerStarted","Data":"fd802d888676953a87165e66a9bd7497052ebe529b4b735a20d73e66e0777c08"} Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324231 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerID="c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c" exitCode=0 Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324258 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerID="de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225" exitCode=2 Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324268 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerID="51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b" exitCode=0 Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324282 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerDied","Data":"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c"} Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324300 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerDied","Data":"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225"} Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.324309 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerDied","Data":"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b"} Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.325758 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.360497 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdf2g\" (UniqueName: \"kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.361287 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.362719 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.376976 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.401922 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdf2g\" (UniqueName: \"kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g\") pod \"nova-cell1-3e2f-account-create-update-w9xkp\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.611848 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.623174 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-c196-account-create-update-629fc"] Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.663732 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-2ht2h"] Dec 15 07:14:07 crc kubenswrapper[4876]: W1215 07:14:07.677673 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56ef7f70_9cff_4db0_a699_f6b3496ed677.slice/crio-b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592 WatchSource:0}: Error finding container b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592: Status 404 returned error can't find the container with id b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592 Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.883055 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.886333 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:14:07 crc kubenswrapper[4876]: W1215 07:14:07.945901 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda502b9f7_3c6f_43fb_a1e7_b55ade6448a9.slice/crio-2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd WatchSource:0}: Error finding container 2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd: Status 404 returned error can't find the container with id 2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd Dec 15 07:14:07 crc kubenswrapper[4876]: I1215 07:14:07.965662 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d7a8-account-create-update-9kn7n"] Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.063991 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-cfvxh"] Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.076201 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-pkjmb"] Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.265738 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3e2f-account-create-update-w9xkp"] Dec 15 07:14:08 crc kubenswrapper[4876]: W1215 07:14:08.296048 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4160ee3_8aac_4f22_b6c4_cce7d18781d6.slice/crio-d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5 WatchSource:0}: Error finding container d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5: Status 404 returned error can't find the container with id d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5 Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.336838 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pkjmb" event={"ID":"822eb949-28b7-4487-9672-f4cd8dd3faa3","Type":"ContainerStarted","Data":"f855bcfc87aa1004b8940366dae5e2883b5a1cd2c93b3d1d4fb168e9eba3ca6a"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.338581 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerStarted","Data":"77739227b2e4df94b91265646c063d9a27732d0f675c7fd40830a4b54d3a0c86"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.340576 4876 generic.go:334] "Generic (PLEG): container finished" podID="58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" containerID="6ef07e3c5b93e563fee5ab9c6f22518b4efbc5830572701e2b5b1257e3857c7d" exitCode=0 Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.340628 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2ht2h" event={"ID":"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd","Type":"ContainerDied","Data":"6ef07e3c5b93e563fee5ab9c6f22518b4efbc5830572701e2b5b1257e3857c7d"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.340647 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2ht2h" event={"ID":"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd","Type":"ContainerStarted","Data":"a0c5617cb1f6a77aa2f9023803ae5a4dd983693571c5fb7babc64bad64e58804"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.344860 4876 generic.go:334] "Generic (PLEG): container finished" podID="56ef7f70-9cff-4db0-a699-f6b3496ed677" containerID="b3bd0b7823735f04ec2747afdac419e40693e5f817e9f14ee5782df58a16fb64" exitCode=0 Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.344951 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c196-account-create-update-629fc" event={"ID":"56ef7f70-9cff-4db0-a699-f6b3496ed677","Type":"ContainerDied","Data":"b3bd0b7823735f04ec2747afdac419e40693e5f817e9f14ee5782df58a16fb64"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.344995 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c196-account-create-update-629fc" event={"ID":"56ef7f70-9cff-4db0-a699-f6b3496ed677","Type":"ContainerStarted","Data":"b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.350688 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" event={"ID":"d4160ee3-8aac-4f22-b6c4-cce7d18781d6","Type":"ContainerStarted","Data":"d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.353256 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" event={"ID":"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9","Type":"ContainerStarted","Data":"2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd"} Dec 15 07:14:08 crc kubenswrapper[4876]: I1215 07:14:08.356345 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cfvxh" event={"ID":"72dd7822-885f-4b25-9843-39d598af3697","Type":"ContainerStarted","Data":"e487d5e2b26dd8614572bc050117455fb089f419c7d986966f5134d829a45a04"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.020350 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.202339 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q25c\" (UniqueName: \"kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.202543 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.202709 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.202902 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.202966 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.203044 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.203198 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.203273 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd\") pod \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\" (UID: \"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.203706 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.204608 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.204635 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.209866 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts" (OuterVolumeSpecName: "scripts") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.221637 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c" (OuterVolumeSpecName: "kube-api-access-2q25c") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "kube-api-access-2q25c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.248887 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.280281 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.306184 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.306213 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.306222 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.306231 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q25c\" (UniqueName: \"kubernetes.io/projected/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-kube-api-access-2q25c\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.309512 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data" (OuterVolumeSpecName: "config-data") pod "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" (UID: "ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.365902 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.366287 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-log" containerID="cri-o://b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f" gracePeriod=30 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.366578 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-httpd" containerID="cri-o://0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4" gracePeriod=30 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.374518 4876 generic.go:334] "Generic (PLEG): container finished" podID="72dd7822-885f-4b25-9843-39d598af3697" containerID="64ebd7f515630c3d30ddbca962f75c5a3dc31b0c6d22a8af88bed0a2533b325f" exitCode=0 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.374725 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cfvxh" event={"ID":"72dd7822-885f-4b25-9843-39d598af3697","Type":"ContainerDied","Data":"64ebd7f515630c3d30ddbca962f75c5a3dc31b0c6d22a8af88bed0a2533b325f"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.376772 4876 generic.go:334] "Generic (PLEG): container finished" podID="822eb949-28b7-4487-9672-f4cd8dd3faa3" containerID="afbe396af5b5d28cb16805ba0df6ca3ec32ddb8528a8b72ae216c9953653f09c" exitCode=0 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.376968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pkjmb" 
event={"ID":"822eb949-28b7-4487-9672-f4cd8dd3faa3","Type":"ContainerDied","Data":"afbe396af5b5d28cb16805ba0df6ca3ec32ddb8528a8b72ae216c9953653f09c"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.380926 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerStarted","Data":"1cb73a4bf7ed52b3476b312fe18978e8dfe39c5143fc358708eff5808b066bce"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.387909 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerID="827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d" exitCode=0 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.388001 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerDied","Data":"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.388045 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d","Type":"ContainerDied","Data":"fda211ec451e132ab210e68b01d4b186fc84ed2817a84e88d2616a336c338d76"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.388068 4876 scope.go:117] "RemoveContainer" containerID="c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.388330 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.396652 4876 generic.go:334] "Generic (PLEG): container finished" podID="d4160ee3-8aac-4f22-b6c4-cce7d18781d6" containerID="e1b05578ca205ace99ce2b9f59076a765fe7abebc45a03c8c6cad9fd601c4197" exitCode=0 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.397000 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" event={"ID":"d4160ee3-8aac-4f22-b6c4-cce7d18781d6","Type":"ContainerDied","Data":"e1b05578ca205ace99ce2b9f59076a765fe7abebc45a03c8c6cad9fd601c4197"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.407654 4876 generic.go:334] "Generic (PLEG): container finished" podID="a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" containerID="6687e14dbc12d2a78d968537c35d1dd9ec16adfd9f027ee295ff8680066dcf82" exitCode=0 Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.408086 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" event={"ID":"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9","Type":"ContainerDied","Data":"6687e14dbc12d2a78d968537c35d1dd9ec16adfd9f027ee295ff8680066dcf82"} Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.423137 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.460834 4876 scope.go:117] "RemoveContainer" containerID="de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.510160 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.510144464 podStartE2EDuration="4.510144464s" podCreationTimestamp="2025-12-15 
07:14:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:09.50242404 +0000 UTC m=+1375.073566951" watchObservedRunningTime="2025-12-15 07:14:09.510144464 +0000 UTC m=+1375.081287365" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.546198 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.578218 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.585730 4876 scope.go:117] "RemoveContainer" containerID="51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.587868 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.588670 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="sg-core" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588684 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="sg-core" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.588696 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-notification-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588703 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-notification-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.588732 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-central-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588738 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-central-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.588747 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="proxy-httpd" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588753 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="proxy-httpd" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588929 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="sg-core" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588939 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-notification-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588953 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="proxy-httpd" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.588970 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" containerName="ceilometer-central-agent" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.590564 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.595718 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.595994 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.596738 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.622360 4876 scope.go:117] "RemoveContainer" containerID="827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.643929 4876 scope.go:117] "RemoveContainer" containerID="c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.644403 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c\": container with ID starting with c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c not found: ID does not exist" containerID="c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.644444 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c"} err="failed to get container status \"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c\": rpc error: code = NotFound desc = could not find container \"c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c\": container with ID starting with c52959a7e0faf9362494d1486149405848c74af49a313cddc71d7acfb8c5b64c not found: ID does not exist" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.644470 4876 scope.go:117] "RemoveContainer" containerID="de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.644780 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225\": container with ID starting with de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225 not found: ID does not exist" containerID="de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.644808 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225"} err="failed to get container status \"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225\": rpc error: code = NotFound desc = could not find container \"de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225\": container with ID starting with de38d506212e5ba84d515bc9992b4db0c46f198a40a5f0050afe337ff3acc225 not found: ID does not exist" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.644833 4876 scope.go:117] "RemoveContainer" containerID="51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.645254 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b\": container with ID starting with 51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b not found: ID does not exist" containerID="51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.645307 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b"} err="failed to get container status \"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b\": rpc error: code = NotFound desc = could not find container \"51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b\": container with ID starting with 51bc9e001e3c0da82384e384048c34095b6ea92ff9a6e29c1c88e008c1ecb44b not found: ID does not exist" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.645342 4876 scope.go:117] "RemoveContainer" containerID="827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d" Dec 15 07:14:09 crc kubenswrapper[4876]: E1215 07:14:09.645769 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d\": container with ID starting with 827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d not found: ID does not exist" containerID="827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.645803 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d"} err="failed to get container status \"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d\": rpc error: code = NotFound desc = could not find container \"827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d\": container with ID starting with 827accecedb1d87d0e7dae1693085df2db1af1a5a6e23113ce38064f5b8b343d not found: ID does not exist" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.729171 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnhxw\" (UniqueName: \"kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.729365 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.730054 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.730167 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd\") pod \"ceilometer-0\" (UID: 
\"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.730248 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.730308 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.730422 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.785403 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.832681 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.832806 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.832889 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.832920 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.833010 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.833049 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.833130 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnhxw\" (UniqueName: \"kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.840580 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.842245 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.842611 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.843009 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.844444 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.845155 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.849594 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnhxw\" (UniqueName: \"kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw\") pod \"ceilometer-0\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.853803 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.929760 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.935999 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts\") pod \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.936142 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6b6b\" (UniqueName: \"kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b\") pod \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\" (UID: \"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd\") " Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.937600 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" (UID: "58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:09 crc kubenswrapper[4876]: I1215 07:14:09.941510 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b" (OuterVolumeSpecName: "kube-api-access-c6b6b") pod "58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" (UID: "58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd"). InnerVolumeSpecName "kube-api-access-c6b6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.042666 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pflfm\" (UniqueName: \"kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm\") pod \"56ef7f70-9cff-4db0-a699-f6b3496ed677\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.042927 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts\") pod \"56ef7f70-9cff-4db0-a699-f6b3496ed677\" (UID: \"56ef7f70-9cff-4db0-a699-f6b3496ed677\") " Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.043445 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.043456 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6b6b\" (UniqueName: \"kubernetes.io/projected/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd-kube-api-access-c6b6b\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.044082 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56ef7f70-9cff-4db0-a699-f6b3496ed677" (UID: "56ef7f70-9cff-4db0-a699-f6b3496ed677"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.052321 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm" (OuterVolumeSpecName: "kube-api-access-pflfm") pod "56ef7f70-9cff-4db0-a699-f6b3496ed677" (UID: "56ef7f70-9cff-4db0-a699-f6b3496ed677"). InnerVolumeSpecName "kube-api-access-pflfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.144755 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pflfm\" (UniqueName: \"kubernetes.io/projected/56ef7f70-9cff-4db0-a699-f6b3496ed677-kube-api-access-pflfm\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.144814 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56ef7f70-9cff-4db0-a699-f6b3496ed677-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.376872 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.386869 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.418694 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-c196-account-create-update-629fc" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.418685 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-c196-account-create-update-629fc" event={"ID":"56ef7f70-9cff-4db0-a699-f6b3496ed677","Type":"ContainerDied","Data":"b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592"} Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.418843 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b16ba4d00ffa8a4c54f9d1ff50b44f154c6b53ee7d049f582de5254933df4592" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.419767 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerStarted","Data":"314ef26e8778ce69eb3047e6c448543593fbdba60ba2ed35f75b8b0ec8f68afb"} Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.422602 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-2ht2h" event={"ID":"58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd","Type":"ContainerDied","Data":"a0c5617cb1f6a77aa2f9023803ae5a4dd983693571c5fb7babc64bad64e58804"} Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.422635 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0c5617cb1f6a77aa2f9023803ae5a4dd983693571c5fb7babc64bad64e58804" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.422611 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-2ht2h" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.424611 4876 generic.go:334] "Generic (PLEG): container finished" podID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerID="b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f" exitCode=143 Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.424703 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerDied","Data":"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f"} Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.726926 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d" path="/var/lib/kubelet/pods/ff1a4dc8-dce2-44fd-a9f1-7bec30b92a9d/volumes" Dec 15 07:14:10 crc kubenswrapper[4876]: I1215 07:14:10.934818 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.022226 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.028609 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.069547 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts\") pod \"822eb949-28b7-4487-9672-f4cd8dd3faa3\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.069620 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbljz\" (UniqueName: \"kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz\") pod \"822eb949-28b7-4487-9672-f4cd8dd3faa3\" (UID: \"822eb949-28b7-4487-9672-f4cd8dd3faa3\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.078850 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "822eb949-28b7-4487-9672-f4cd8dd3faa3" (UID: "822eb949-28b7-4487-9672-f4cd8dd3faa3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.094277 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz" (OuterVolumeSpecName: "kube-api-access-tbljz") pod "822eb949-28b7-4487-9672-f4cd8dd3faa3" (UID: "822eb949-28b7-4487-9672-f4cd8dd3faa3"). InnerVolumeSpecName "kube-api-access-tbljz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.174426 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts\") pod \"72dd7822-885f-4b25-9843-39d598af3697\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.174874 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zzvp\" (UniqueName: \"kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp\") pod \"72dd7822-885f-4b25-9843-39d598af3697\" (UID: \"72dd7822-885f-4b25-9843-39d598af3697\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.174907 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdf2g\" (UniqueName: \"kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g\") pod \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.174943 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts\") pod \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\" (UID: \"d4160ee3-8aac-4f22-b6c4-cce7d18781d6\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.175064 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72dd7822-885f-4b25-9843-39d598af3697" (UID: "72dd7822-885f-4b25-9843-39d598af3697"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.175391 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/822eb949-28b7-4487-9672-f4cd8dd3faa3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.175409 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbljz\" (UniqueName: \"kubernetes.io/projected/822eb949-28b7-4487-9672-f4cd8dd3faa3-kube-api-access-tbljz\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.175421 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72dd7822-885f-4b25-9843-39d598af3697-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.175807 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4160ee3-8aac-4f22-b6c4-cce7d18781d6" (UID: "d4160ee3-8aac-4f22-b6c4-cce7d18781d6"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.178344 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp" (OuterVolumeSpecName: "kube-api-access-4zzvp") pod "72dd7822-885f-4b25-9843-39d598af3697" (UID: "72dd7822-885f-4b25-9843-39d598af3697"). InnerVolumeSpecName "kube-api-access-4zzvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.182262 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g" (OuterVolumeSpecName: "kube-api-access-sdf2g") pod "d4160ee3-8aac-4f22-b6c4-cce7d18781d6" (UID: "d4160ee3-8aac-4f22-b6c4-cce7d18781d6"). InnerVolumeSpecName "kube-api-access-sdf2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.274434 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.277152 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zzvp\" (UniqueName: \"kubernetes.io/projected/72dd7822-885f-4b25-9843-39d598af3697-kube-api-access-4zzvp\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.277211 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdf2g\" (UniqueName: \"kubernetes.io/projected/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-kube-api-access-sdf2g\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.277227 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4160ee3-8aac-4f22-b6c4-cce7d18781d6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.378136 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts\") pod \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.378301 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpclg\" (UniqueName: \"kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg\") pod \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\" (UID: \"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9\") " Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.378605 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" (UID: "a502b9f7-3c6f-43fb-a1e7-b55ade6448a9"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.378802 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.381891 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg" (OuterVolumeSpecName: "kube-api-access-tpclg") pod "a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" (UID: "a502b9f7-3c6f-43fb-a1e7-b55ade6448a9"). InnerVolumeSpecName "kube-api-access-tpclg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.455601 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-pkjmb" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.455583 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-pkjmb" event={"ID":"822eb949-28b7-4487-9672-f4cd8dd3faa3","Type":"ContainerDied","Data":"f855bcfc87aa1004b8940366dae5e2883b5a1cd2c93b3d1d4fb168e9eba3ca6a"} Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.455745 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f855bcfc87aa1004b8940366dae5e2883b5a1cd2c93b3d1d4fb168e9eba3ca6a" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.458537 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" event={"ID":"d4160ee3-8aac-4f22-b6c4-cce7d18781d6","Type":"ContainerDied","Data":"d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5"} Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.458591 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5d4ffd306b3eb93f61f40cc8a46a472dca479d822d1c087ddc877c399dc65c5" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.458549 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3e2f-account-create-update-w9xkp" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.461255 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" event={"ID":"a502b9f7-3c6f-43fb-a1e7-b55ade6448a9","Type":"ContainerDied","Data":"2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd"} Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.461316 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2501c9142b281dfe11594e5b0dd02562a169b271063d0da29942574c6be259bd" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.461287 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-d7a8-account-create-update-9kn7n" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.463262 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerStarted","Data":"41f9a4fec6b89658da5df5f87093799ed153ba1bc816d47ee4020d4097c1c82e"} Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.465161 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-cfvxh" event={"ID":"72dd7822-885f-4b25-9843-39d598af3697","Type":"ContainerDied","Data":"e487d5e2b26dd8614572bc050117455fb089f419c7d986966f5134d829a45a04"} Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.465200 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e487d5e2b26dd8614572bc050117455fb089f419c7d986966f5134d829a45a04" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.465215 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-cfvxh" Dec 15 07:14:11 crc kubenswrapper[4876]: I1215 07:14:11.503703 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpclg\" (UniqueName: \"kubernetes.io/projected/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9-kube-api-access-tpclg\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:12 crc kubenswrapper[4876]: I1215 07:14:12.479581 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerStarted","Data":"caaeab57db4096f6cd5f2006b19dc4175c81e775f70ec3c23f26ec3814088e0e"} Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.001001 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.051025 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.051125 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxzlz\" (UniqueName: \"kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.051155 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.051332 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.075992 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.079323 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz" (OuterVolumeSpecName: "kube-api-access-vxzlz") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "kube-api-access-vxzlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.118044 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.146644 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data" (OuterVolumeSpecName: "config-data") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.152729 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.152779 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.152799 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.152906 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs\") pod \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\" (UID: \"764e8736-0f6f-49ee-9ca8-d6dd98ddf969\") " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.154784 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.154890 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxzlz\" (UniqueName: \"kubernetes.io/projected/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-kube-api-access-vxzlz\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.154908 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.154917 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.155351 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs" (OuterVolumeSpecName: "logs") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.157582 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts" (OuterVolumeSpecName: "scripts") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.166374 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.227054 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "764e8736-0f6f-49ee-9ca8-d6dd98ddf969" (UID: "764e8736-0f6f-49ee-9ca8-d6dd98ddf969"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.257019 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.257061 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.257073 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764e8736-0f6f-49ee-9ca8-d6dd98ddf969-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.257129 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.284020 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.358725 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.490875 4876 generic.go:334] "Generic (PLEG): container finished" podID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerID="0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4" exitCode=0 Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.490938 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.490962 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerDied","Data":"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4"} Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.490999 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"764e8736-0f6f-49ee-9ca8-d6dd98ddf969","Type":"ContainerDied","Data":"ee66c89683db4aa505c096aedb1f92d696e0b3188ad23ae3fd23059483e7bf95"} Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.491019 4876 scope.go:117] "RemoveContainer" containerID="0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.493177 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerStarted","Data":"408b2f11a5b3a265bb96d3b660cfdb6df8bcebf923344fca0c3bf0d8427d527c"} Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.516904 4876 scope.go:117] "RemoveContainer" containerID="b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.539358 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.553698 4876 scope.go:117] "RemoveContainer" containerID="0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.554355 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4\": container with ID starting with 0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4 not found: ID does not exist" containerID="0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.554403 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4"} err="failed to get container status \"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4\": rpc error: code = NotFound desc = could not find container \"0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4\": container with ID starting with 0052d6b74e823a1022a04311177fb11a254c046d8afbb55e119f70b40b6635c4 not found: ID does not exist" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.554437 4876 scope.go:117] "RemoveContainer" containerID="b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.555340 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f\": container with ID starting with b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f not found: ID does not exist" containerID="b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.555374 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f"} err="failed to get container status \"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f\": rpc error: code = NotFound desc = could not find container \"b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f\": container with ID starting with b4b20f3570fc9eb2183f25df2ab96dcbec5d15167b788913f2f53701972eb77f not found: ID does not exist" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.560961 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.579508 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.579928 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822eb949-28b7-4487-9672-f4cd8dd3faa3" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.579949 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="822eb949-28b7-4487-9672-f4cd8dd3faa3" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.579967 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56ef7f70-9cff-4db0-a699-f6b3496ed677" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.579975 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="56ef7f70-9cff-4db0-a699-f6b3496ed677" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.579991 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72dd7822-885f-4b25-9843-39d598af3697" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.579997 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="72dd7822-885f-4b25-9843-39d598af3697" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.580007 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580013 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.580022 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-httpd" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580029 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-httpd" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.580043 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-log" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580052 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-log" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.580065 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4160ee3-8aac-4f22-b6c4-cce7d18781d6" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580073 4876 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="d4160ee3-8aac-4f22-b6c4-cce7d18781d6" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: E1215 07:14:13.580092 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580123 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580332 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="72dd7822-885f-4b25-9843-39d598af3697" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580353 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-log" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580367 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="56ef7f70-9cff-4db0-a699-f6b3496ed677" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580379 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="822eb949-28b7-4487-9672-f4cd8dd3faa3" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580392 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580401 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" containerName="mariadb-database-create" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580415 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" containerName="glance-httpd" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.580428 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4160ee3-8aac-4f22-b6c4-cce7d18781d6" containerName="mariadb-account-create-update" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.581689 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.584728 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.586630 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.589813 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766046 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws5vq\" (UniqueName: \"kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766163 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766324 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766418 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766616 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766742 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766797 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.766903 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869064 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws5vq\" (UniqueName: \"kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869172 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869258 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869292 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869365 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869474 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.869509 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.870351 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.870452 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.870542 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.878802 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.890022 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.890525 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.896803 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws5vq\" (UniqueName: \"kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.897728 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:13 crc kubenswrapper[4876]: I1215 07:14:13.924592 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " pod="openstack/glance-default-external-api-0" Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.213391 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.506251 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerStarted","Data":"f6ea9e899e793de225d46e288e60eb3f946e38dc70fb8cf9256cef75c8d8ce87"} Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.508173 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.532322 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.006525463 podStartE2EDuration="5.532306013s" podCreationTimestamp="2025-12-15 07:14:09 +0000 UTC" firstStartedPulling="2025-12-15 07:14:10.386652669 +0000 UTC m=+1375.957795570" lastFinishedPulling="2025-12-15 07:14:13.912433209 +0000 UTC m=+1379.483576120" observedRunningTime="2025-12-15 07:14:14.526736776 +0000 UTC m=+1380.097879687" watchObservedRunningTime="2025-12-15 07:14:14.532306013 +0000 UTC m=+1380.103448924" Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.718821 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="764e8736-0f6f-49ee-9ca8-d6dd98ddf969" path="/var/lib/kubelet/pods/764e8736-0f6f-49ee-9ca8-d6dd98ddf969/volumes" Dec 15 07:14:14 crc kubenswrapper[4876]: I1215 07:14:14.827030 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:14:15 crc kubenswrapper[4876]: I1215 07:14:15.526235 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerStarted","Data":"84b1c1cdb1f7106884cf4f5ab085a85917d93186b5d58717c1abe18b0a027118"} Dec 15 07:14:15 crc kubenswrapper[4876]: I1215 07:14:15.967421 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:15 crc kubenswrapper[4876]: I1215 07:14:15.967482 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:16 crc kubenswrapper[4876]: I1215 07:14:16.006472 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:16 crc kubenswrapper[4876]: I1215 07:14:16.017428 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:16 crc kubenswrapper[4876]: I1215 07:14:16.537780 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:16 crc kubenswrapper[4876]: I1215 07:14:16.537948 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.254305 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5sgjf"] Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.255957 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.258513 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.258679 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.258787 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hn62j" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.272124 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5sgjf"] Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.355650 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.442162 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k72qj\" (UniqueName: \"kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.442272 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.442319 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.442383 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.544356 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.544427 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.544496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.544518 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k72qj\" (UniqueName: \"kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.549545 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.549883 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-central-agent" containerID="cri-o://41f9a4fec6b89658da5df5f87093799ed153ba1bc816d47ee4020d4097c1c82e" gracePeriod=30 Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.550718 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerStarted","Data":"c5bf69c338d2955649defd7611ec504382e5c3542edee5ca3f56d01332885ae6"} Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.550742 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerStarted","Data":"cc294b31b47b3b2be756a6a68ef1afe2c1758bb4597cea8ea401001afc30deaa"} Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.551452 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="proxy-httpd" containerID="cri-o://f6ea9e899e793de225d46e288e60eb3f946e38dc70fb8cf9256cef75c8d8ce87" gracePeriod=30 Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.551513 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="sg-core" containerID="cri-o://408b2f11a5b3a265bb96d3b660cfdb6df8bcebf923344fca0c3bf0d8427d527c" gracePeriod=30 Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.551546 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-notification-agent" containerID="cri-o://caaeab57db4096f6cd5f2006b19dc4175c81e775f70ec3c23f26ec3814088e0e" gracePeriod=30 Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.557558 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.567299 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-k72qj\" (UniqueName: \"kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.584907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5sgjf\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.590534 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:17 crc kubenswrapper[4876]: I1215 07:14:17.606523 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.606500528 podStartE2EDuration="4.606500528s" podCreationTimestamp="2025-12-15 07:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:17.600739765 +0000 UTC m=+1383.171882676" watchObservedRunningTime="2025-12-15 07:14:17.606500528 +0000 UTC m=+1383.177643449" Dec 15 07:14:18 crc kubenswrapper[4876]: W1215 07:14:18.105970 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod686f7e40_e2a2_486b_83b0_4a0bc33f15e3.slice/crio-dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb WatchSource:0}: Error finding container dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb: Status 404 returned error can't find the container with id dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.111292 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5sgjf"] Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.564419 4876 generic.go:334] "Generic (PLEG): container finished" podID="7268fa60-bc38-4264-9830-1946009bb42d" containerID="f6ea9e899e793de225d46e288e60eb3f946e38dc70fb8cf9256cef75c8d8ce87" exitCode=0 Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.564463 4876 generic.go:334] "Generic (PLEG): container finished" podID="7268fa60-bc38-4264-9830-1946009bb42d" containerID="408b2f11a5b3a265bb96d3b660cfdb6df8bcebf923344fca0c3bf0d8427d527c" exitCode=2 Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.564473 4876 generic.go:334] "Generic (PLEG): container finished" podID="7268fa60-bc38-4264-9830-1946009bb42d" containerID="caaeab57db4096f6cd5f2006b19dc4175c81e775f70ec3c23f26ec3814088e0e" exitCode=0 Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.564490 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerDied","Data":"f6ea9e899e793de225d46e288e60eb3f946e38dc70fb8cf9256cef75c8d8ce87"} Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.564538 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerDied","Data":"408b2f11a5b3a265bb96d3b660cfdb6df8bcebf923344fca0c3bf0d8427d527c"} Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 
07:14:18.564548 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerDied","Data":"caaeab57db4096f6cd5f2006b19dc4175c81e775f70ec3c23f26ec3814088e0e"} Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.565846 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" event={"ID":"686f7e40-e2a2-486b-83b0-4a0bc33f15e3","Type":"ContainerStarted","Data":"dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb"} Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.565885 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.565902 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.830334 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:18 crc kubenswrapper[4876]: I1215 07:14:18.834843 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 07:14:23 crc kubenswrapper[4876]: I1215 07:14:23.616002 4876 generic.go:334] "Generic (PLEG): container finished" podID="7268fa60-bc38-4264-9830-1946009bb42d" containerID="41f9a4fec6b89658da5df5f87093799ed153ba1bc816d47ee4020d4097c1c82e" exitCode=0 Dec 15 07:14:23 crc kubenswrapper[4876]: I1215 07:14:23.616623 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerDied","Data":"41f9a4fec6b89658da5df5f87093799ed153ba1bc816d47ee4020d4097c1c82e"} Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.214688 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.214750 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.245053 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.262881 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.623667 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 07:14:24 crc kubenswrapper[4876]: I1215 07:14:24.623705 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.617641 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.635252 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.635267 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7268fa60-bc38-4264-9830-1946009bb42d","Type":"ContainerDied","Data":"314ef26e8778ce69eb3047e6c448543593fbdba60ba2ed35f75b8b0ec8f68afb"} Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.635353 4876 scope.go:117] "RemoveContainer" containerID="f6ea9e899e793de225d46e288e60eb3f946e38dc70fb8cf9256cef75c8d8ce87" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.640884 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" event={"ID":"686f7e40-e2a2-486b-83b0-4a0bc33f15e3","Type":"ContainerStarted","Data":"929292f41d8a9a781bbba6431a1271b170eeff8362239c0fd03c074a8f5532c4"} Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.666924 4876 scope.go:117] "RemoveContainer" containerID="408b2f11a5b3a265bb96d3b660cfdb6df8bcebf923344fca0c3bf0d8427d527c" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.667057 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" podStartSLOduration=1.440128145 podStartE2EDuration="8.667018711s" podCreationTimestamp="2025-12-15 07:14:17 +0000 UTC" firstStartedPulling="2025-12-15 07:14:18.10817284 +0000 UTC m=+1383.679315751" lastFinishedPulling="2025-12-15 07:14:25.335063406 +0000 UTC m=+1390.906206317" observedRunningTime="2025-12-15 07:14:25.665484781 +0000 UTC m=+1391.236627692" watchObservedRunningTime="2025-12-15 07:14:25.667018711 +0000 UTC m=+1391.238161642" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.699309 4876 scope.go:117] "RemoveContainer" containerID="caaeab57db4096f6cd5f2006b19dc4175c81e775f70ec3c23f26ec3814088e0e" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.726277 4876 scope.go:117] "RemoveContainer" containerID="41f9a4fec6b89658da5df5f87093799ed153ba1bc816d47ee4020d4097c1c82e" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774465 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774524 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774545 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774571 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774608 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774677 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnhxw\" (UniqueName: \"kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.774754 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd\") pod \"7268fa60-bc38-4264-9830-1946009bb42d\" (UID: \"7268fa60-bc38-4264-9830-1946009bb42d\") " Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.777753 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.778269 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.781649 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts" (OuterVolumeSpecName: "scripts") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.781755 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw" (OuterVolumeSpecName: "kube-api-access-rnhxw") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "kube-api-access-rnhxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.806658 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.847029 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.873256 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data" (OuterVolumeSpecName: "config-data") pod "7268fa60-bc38-4264-9830-1946009bb42d" (UID: "7268fa60-bc38-4264-9830-1946009bb42d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876561 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnhxw\" (UniqueName: \"kubernetes.io/projected/7268fa60-bc38-4264-9830-1946009bb42d-kube-api-access-rnhxw\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876604 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876619 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876631 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7268fa60-bc38-4264-9830-1946009bb42d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876643 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876657 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.876669 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7268fa60-bc38-4264-9830-1946009bb42d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.972083 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:25 crc kubenswrapper[4876]: I1215 07:14:25.981612 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.047310 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:26 crc kubenswrapper[4876]: E1215 07:14:26.049164 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="proxy-httpd" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.049225 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="proxy-httpd" Dec 15 07:14:26 crc kubenswrapper[4876]: E1215 07:14:26.049255 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-notification-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.049288 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7268fa60-bc38-4264-9830-1946009bb42d" 
containerName="ceilometer-notification-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: E1215 07:14:26.049307 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-central-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.049313 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-central-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: E1215 07:14:26.049773 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="sg-core" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.049789 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="sg-core" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.052815 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-notification-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.052868 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="ceilometer-central-agent" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.052886 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="proxy-httpd" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.052923 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7268fa60-bc38-4264-9830-1946009bb42d" containerName="sg-core" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.060462 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.065953 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.068616 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.068894 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182687 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182750 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182779 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182803 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc4fq\" (UniqueName: \"kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182836 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182855 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.182898 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285026 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285095 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285146 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285174 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc4fq\" (UniqueName: \"kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285219 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285242 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285319 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.285613 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.286153 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.289950 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.290182 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.298693 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.299602 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.306172 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc4fq\" (UniqueName: \"kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq\") pod \"ceilometer-0\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.387244 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.716324 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7268fa60-bc38-4264-9830-1946009bb42d" path="/var/lib/kubelet/pods/7268fa60-bc38-4264-9830-1946009bb42d/volumes" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.761812 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.761951 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.765017 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 07:14:26 crc kubenswrapper[4876]: I1215 07:14:26.890065 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:14:27 crc kubenswrapper[4876]: I1215 07:14:27.658951 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerStarted","Data":"f801da5e300b3da133c3806346ba5113526c1125ad17edce852cc8fc51b38134"} Dec 15 07:14:28 crc kubenswrapper[4876]: I1215 07:14:28.672248 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerStarted","Data":"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661"} Dec 15 07:14:28 crc kubenswrapper[4876]: I1215 07:14:28.672751 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerStarted","Data":"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7"} Dec 15 07:14:29 crc kubenswrapper[4876]: I1215 07:14:29.684393 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerStarted","Data":"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d"} Dec 15 07:14:31 crc kubenswrapper[4876]: I1215 07:14:31.706686 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerStarted","Data":"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192"} Dec 15 07:14:31 crc kubenswrapper[4876]: I1215 
07:14:31.707322 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:14:31 crc kubenswrapper[4876]: I1215 07:14:31.745501 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.98381132 podStartE2EDuration="6.745481059s" podCreationTimestamp="2025-12-15 07:14:25 +0000 UTC" firstStartedPulling="2025-12-15 07:14:26.905075516 +0000 UTC m=+1392.476218427" lastFinishedPulling="2025-12-15 07:14:30.666745255 +0000 UTC m=+1396.237888166" observedRunningTime="2025-12-15 07:14:31.736614583 +0000 UTC m=+1397.307757504" watchObservedRunningTime="2025-12-15 07:14:31.745481059 +0000 UTC m=+1397.316623970" Dec 15 07:14:36 crc kubenswrapper[4876]: I1215 07:14:36.768373 4876 generic.go:334] "Generic (PLEG): container finished" podID="686f7e40-e2a2-486b-83b0-4a0bc33f15e3" containerID="929292f41d8a9a781bbba6431a1271b170eeff8362239c0fd03c074a8f5532c4" exitCode=0 Dec 15 07:14:36 crc kubenswrapper[4876]: I1215 07:14:36.768474 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" event={"ID":"686f7e40-e2a2-486b-83b0-4a0bc33f15e3","Type":"ContainerDied","Data":"929292f41d8a9a781bbba6431a1271b170eeff8362239c0fd03c074a8f5532c4"} Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.136085 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.263802 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts\") pod \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.264197 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data\") pod \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.264248 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k72qj\" (UniqueName: \"kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj\") pod \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.264301 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle\") pod \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\" (UID: \"686f7e40-e2a2-486b-83b0-4a0bc33f15e3\") " Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.271825 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts" (OuterVolumeSpecName: "scripts") pod "686f7e40-e2a2-486b-83b0-4a0bc33f15e3" (UID: "686f7e40-e2a2-486b-83b0-4a0bc33f15e3"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.272691 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj" (OuterVolumeSpecName: "kube-api-access-k72qj") pod "686f7e40-e2a2-486b-83b0-4a0bc33f15e3" (UID: "686f7e40-e2a2-486b-83b0-4a0bc33f15e3"). InnerVolumeSpecName "kube-api-access-k72qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.297255 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data" (OuterVolumeSpecName: "config-data") pod "686f7e40-e2a2-486b-83b0-4a0bc33f15e3" (UID: "686f7e40-e2a2-486b-83b0-4a0bc33f15e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.297655 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "686f7e40-e2a2-486b-83b0-4a0bc33f15e3" (UID: "686f7e40-e2a2-486b-83b0-4a0bc33f15e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.366143 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.366181 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.366220 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k72qj\" (UniqueName: \"kubernetes.io/projected/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-kube-api-access-k72qj\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.366235 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/686f7e40-e2a2-486b-83b0-4a0bc33f15e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.793438 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" event={"ID":"686f7e40-e2a2-486b-83b0-4a0bc33f15e3","Type":"ContainerDied","Data":"dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb"} Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.793480 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc842d738437b20a9e9cabd1e4ffb1f86f6bd8b7bd82b9a5bde73842abbe8deb" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.793535 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5sgjf" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.940165 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:14:38 crc kubenswrapper[4876]: E1215 07:14:38.940674 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="686f7e40-e2a2-486b-83b0-4a0bc33f15e3" containerName="nova-cell0-conductor-db-sync" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.940698 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="686f7e40-e2a2-486b-83b0-4a0bc33f15e3" containerName="nova-cell0-conductor-db-sync" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.940942 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="686f7e40-e2a2-486b-83b0-4a0bc33f15e3" containerName="nova-cell0-conductor-db-sync" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.941740 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.944637 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hn62j" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.946378 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 07:14:38 crc kubenswrapper[4876]: I1215 07:14:38.954306 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.083076 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.083262 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp9ff\" (UniqueName: \"kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.083811 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.186406 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp9ff\" (UniqueName: \"kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.186534 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: 
I1215 07:14:39.186606 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.200789 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.201754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.204060 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp9ff\" (UniqueName: \"kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff\") pod \"nova-cell0-conductor-0\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.274193 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.724024 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:14:39 crc kubenswrapper[4876]: W1215 07:14:39.727226 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a01ad7f_0924_4b3d_ba95_b5e599f343ee.slice/crio-4b7607f7d670caa301b8b88efaf50178856aeda97660a02a0661fcdd231cbc96 WatchSource:0}: Error finding container 4b7607f7d670caa301b8b88efaf50178856aeda97660a02a0661fcdd231cbc96: Status 404 returned error can't find the container with id 4b7607f7d670caa301b8b88efaf50178856aeda97660a02a0661fcdd231cbc96 Dec 15 07:14:39 crc kubenswrapper[4876]: I1215 07:14:39.809330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a01ad7f-0924-4b3d-ba95-b5e599f343ee","Type":"ContainerStarted","Data":"4b7607f7d670caa301b8b88efaf50178856aeda97660a02a0661fcdd231cbc96"} Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.497704 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.500322 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.520150 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.633065 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.633160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.633330 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b9df\" (UniqueName: \"kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.734484 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b9df\" (UniqueName: \"kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.734603 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.734629 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.735159 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.735281 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.771855 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8b9df\" (UniqueName: \"kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df\") pod \"redhat-marketplace-lnhph\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.823454 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a01ad7f-0924-4b3d-ba95-b5e599f343ee","Type":"ContainerStarted","Data":"3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951"} Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.824587 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.839882 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:40 crc kubenswrapper[4876]: I1215 07:14:40.856938 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.856914817 podStartE2EDuration="2.856914817s" podCreationTimestamp="2025-12-15 07:14:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:40.850884658 +0000 UTC m=+1406.422027569" watchObservedRunningTime="2025-12-15 07:14:40.856914817 +0000 UTC m=+1406.428057728" Dec 15 07:14:41 crc kubenswrapper[4876]: I1215 07:14:41.380627 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:41 crc kubenswrapper[4876]: I1215 07:14:41.837091 4876 generic.go:334] "Generic (PLEG): container finished" podID="12105428-dcb5-404f-91d8-db10b102c067" containerID="c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7" exitCode=0 Dec 15 07:14:41 crc kubenswrapper[4876]: I1215 07:14:41.837179 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerDied","Data":"c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7"} Dec 15 07:14:41 crc kubenswrapper[4876]: I1215 07:14:41.837293 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerStarted","Data":"1c1c9d9859c6bf88bce69b45da9ba79035110bd52d5b4d744998ad9d3d864ab6"} Dec 15 07:14:43 crc kubenswrapper[4876]: I1215 07:14:43.856807 4876 generic.go:334] "Generic (PLEG): container finished" podID="12105428-dcb5-404f-91d8-db10b102c067" containerID="279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466" exitCode=0 Dec 15 07:14:43 crc kubenswrapper[4876]: I1215 07:14:43.856880 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerDied","Data":"279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466"} Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.314645 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.791535 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-lld2z"] Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 
07:14:44.792927 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.794884 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.795343 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.803886 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-lld2z"] Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.881242 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerStarted","Data":"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381"} Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.905194 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lnhph" podStartSLOduration=2.421583916 podStartE2EDuration="4.905179392s" podCreationTimestamp="2025-12-15 07:14:40 +0000 UTC" firstStartedPulling="2025-12-15 07:14:41.840082398 +0000 UTC m=+1407.411225339" lastFinishedPulling="2025-12-15 07:14:44.323677904 +0000 UTC m=+1409.894820815" observedRunningTime="2025-12-15 07:14:44.901534635 +0000 UTC m=+1410.472677556" watchObservedRunningTime="2025-12-15 07:14:44.905179392 +0000 UTC m=+1410.476322303" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.928160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.928243 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.928269 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6vvs\" (UniqueName: \"kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.928285 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.982168 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.984031 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.989999 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:14:44 crc kubenswrapper[4876]: I1215 07:14:44.993393 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.031285 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.031384 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.031417 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6vvs\" (UniqueName: \"kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.031448 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.040285 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.052782 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.053282 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.081750 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.083458 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.084602 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6vvs\" (UniqueName: \"kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs\") pod \"nova-cell0-cell-mapping-lld2z\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.091739 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.122555 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.129562 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.135133 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.135485 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.135638 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvbhk\" (UniqueName: \"kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.135767 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.159660 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.160982 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.168834 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.188178 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.240972 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.241268 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgh4j\" (UniqueName: \"kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.241383 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmbnn\" (UniqueName: \"kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.241505 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvbhk\" (UniqueName: \"kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.241619 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.242529 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.242653 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.242800 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.242972 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.243132 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.243252 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.242575 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.261697 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.263997 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.276073 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.277639 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.284496 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.285952 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvbhk\" (UniqueName: \"kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk\") pod \"nova-api-0\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.299221 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.302812 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.320023 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.333351 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344070 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344144 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344177 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344234 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgh4j\" (UniqueName: \"kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344263 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmbnn\" (UniqueName: \"kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344293 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344389 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344412 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb\") pod 
\"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344453 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344490 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344529 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brpjk\" (UniqueName: \"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344556 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344580 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g65x8\" (UniqueName: \"kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344618 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.344674 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.345490 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.351975 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " 
pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.356145 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.357660 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.361040 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.387933 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgh4j\" (UniqueName: \"kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j\") pod \"nova-scheduler-0\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.395976 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmbnn\" (UniqueName: \"kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn\") pod \"nova-metadata-0\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.414896 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452195 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452264 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brpjk\" (UniqueName: \"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452298 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g65x8\" (UniqueName: \"kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452329 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " 
pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452374 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452416 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452498 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452515 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.452543 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.454074 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.454236 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.454927 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.455454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.455705 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.465141 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.467436 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.470695 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g65x8\" (UniqueName: \"kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8\") pod \"nova-cell1-novncproxy-0\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.470993 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brpjk\" (UniqueName: \"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk\") pod \"dnsmasq-dns-557bbc7df7-cgl7f\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.562562 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.620491 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.653306 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.678540 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:45 crc kubenswrapper[4876]: I1215 07:14:45.846886 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-lld2z"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.093949 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.264448 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.558991 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:14:46 crc kubenswrapper[4876]: W1215 07:14:46.564370 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde596f2b_465e_4b52_9b25_c41022b105b1.slice/crio-fdfeaada17211114f431ef6ead457d453bf968e970d5abc293f758c7bbc32b83 WatchSource:0}: Error finding container fdfeaada17211114f431ef6ead457d453bf968e970d5abc293f758c7bbc32b83: Status 404 returned error can't find the container with id fdfeaada17211114f431ef6ead457d453bf968e970d5abc293f758c7bbc32b83 Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.677781 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bgdfk"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.679317 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.682778 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.682964 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.692518 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bgdfk"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.698570 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.734559 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.782730 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.782880 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.782922 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data\") pod 
\"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.783241 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bksd\" (UniqueName: \"kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.885113 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bksd\" (UniqueName: \"kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.885213 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.885323 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.885359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.893094 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.893919 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.894725 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.908132 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bksd\" (UniqueName: 
\"kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd\") pod \"nova-cell1-conductor-db-sync-bgdfk\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.926256 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1252b5d2-4d38-4d18-8614-70ec63b93b94","Type":"ContainerStarted","Data":"60d6e85c1c4890832bfc9472c5eb43d4c04be81710c984235d89b93063cba4ec"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.927770 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerStarted","Data":"24d45ac9ade612ec13981bc8f1045ed00d9d213d9975d12d4184bbcec961f920"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.935221 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerStarted","Data":"8e5688e77da66d3c3b6f05d9be0ecb8f330111d673a34740a49dae117a678cbc"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.939382 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lld2z" event={"ID":"c7cb8571-8a9d-469b-af65-6dfd59cafeab","Type":"ContainerStarted","Data":"9358846d442c82631bff2771fc2bcff89a0df61ba8e8aa255a9bf5adbd4fec9c"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.939428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lld2z" event={"ID":"c7cb8571-8a9d-469b-af65-6dfd59cafeab","Type":"ContainerStarted","Data":"d2b9e065f51aca224efc6a50828952bb0a9c687c46258da8c7fea37910480614"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.941309 4876 generic.go:334] "Generic (PLEG): container finished" podID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerID="72506abee5e53bf8629d716ec2a45779f4d48bf8a3fb87fc54c11aad761fb1de" exitCode=0 Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.941356 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" event={"ID":"e5cde5f5-9092-4cc3-9129-e303d0a4567a","Type":"ContainerDied","Data":"72506abee5e53bf8629d716ec2a45779f4d48bf8a3fb87fc54c11aad761fb1de"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.941374 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" event={"ID":"e5cde5f5-9092-4cc3-9129-e303d0a4567a","Type":"ContainerStarted","Data":"75a931914f1eb45c52a2d53ecc5fdf2b65f2f0479d547dfe8c41be213aae451b"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.947229 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"de596f2b-465e-4b52-9b25-c41022b105b1","Type":"ContainerStarted","Data":"fdfeaada17211114f431ef6ead457d453bf968e970d5abc293f758c7bbc32b83"} Dec 15 07:14:46 crc kubenswrapper[4876]: I1215 07:14:46.974853 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-lld2z" podStartSLOduration=2.97483421 podStartE2EDuration="2.97483421s" podCreationTimestamp="2025-12-15 07:14:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:46.959304219 +0000 UTC m=+1412.530447130" watchObservedRunningTime="2025-12-15 07:14:46.97483421 +0000 UTC m=+1412.545977121" Dec 15 07:14:47 crc 
kubenswrapper[4876]: I1215 07:14:47.117742 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:47 crc kubenswrapper[4876]: I1215 07:14:47.602990 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bgdfk"] Dec 15 07:14:47 crc kubenswrapper[4876]: I1215 07:14:47.955925 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" event={"ID":"61240875-a8dd-44c2-9082-f9270de53161","Type":"ContainerStarted","Data":"266b115ff9869144a93e2d16d75e5aa44cf4050ac7af88dd9b06571e1851f9c3"} Dec 15 07:14:47 crc kubenswrapper[4876]: I1215 07:14:47.955968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" event={"ID":"61240875-a8dd-44c2-9082-f9270de53161","Type":"ContainerStarted","Data":"f0e096aec0c5fcb0cc07c9ed512c4a038f7036ea724211e72f73c452601b8c6e"} Dec 15 07:14:47 crc kubenswrapper[4876]: I1215 07:14:47.960058 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" event={"ID":"e5cde5f5-9092-4cc3-9129-e303d0a4567a","Type":"ContainerStarted","Data":"dfacf79125cbb126071908d81e1cab21e36699401feb61661cb7dae4abf7e102"} Dec 15 07:14:47 crc kubenswrapper[4876]: I1215 07:14:47.974397 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" podStartSLOduration=1.974378034 podStartE2EDuration="1.974378034s" podCreationTimestamp="2025-12-15 07:14:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:47.969845834 +0000 UTC m=+1413.540988745" watchObservedRunningTime="2025-12-15 07:14:47.974378034 +0000 UTC m=+1413.545520945" Dec 15 07:14:48 crc kubenswrapper[4876]: I1215 07:14:48.000883 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" podStartSLOduration=3.000863586 podStartE2EDuration="3.000863586s" podCreationTimestamp="2025-12-15 07:14:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:47.986375072 +0000 UTC m=+1413.557517983" watchObservedRunningTime="2025-12-15 07:14:48.000863586 +0000 UTC m=+1413.572006507" Dec 15 07:14:48 crc kubenswrapper[4876]: I1215 07:14:48.980866 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:49 crc kubenswrapper[4876]: I1215 07:14:49.577903 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:14:49 crc kubenswrapper[4876]: I1215 07:14:49.586516 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:49 crc kubenswrapper[4876]: I1215 07:14:49.991879 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerStarted","Data":"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a"} Dec 15 07:14:49 crc kubenswrapper[4876]: I1215 07:14:49.993428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerStarted","Data":"6cc63ffbc17cf06b8d4cfe4ff0c86de11bdeacd322b606d8def30aa2fc2fd3bb"} Dec 15 07:14:49 crc 
kubenswrapper[4876]: I1215 07:14:49.994467 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"de596f2b-465e-4b52-9b25-c41022b105b1","Type":"ContainerStarted","Data":"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c"} Dec 15 07:14:49 crc kubenswrapper[4876]: I1215 07:14:49.994575 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="de596f2b-465e-4b52-9b25-c41022b105b1" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c" gracePeriod=30 Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.001456 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1252b5d2-4d38-4d18-8614-70ec63b93b94","Type":"ContainerStarted","Data":"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d"} Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.019315 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.964620821 podStartE2EDuration="5.019297708s" podCreationTimestamp="2025-12-15 07:14:45 +0000 UTC" firstStartedPulling="2025-12-15 07:14:46.568569646 +0000 UTC m=+1412.139712557" lastFinishedPulling="2025-12-15 07:14:49.623246533 +0000 UTC m=+1415.194389444" observedRunningTime="2025-12-15 07:14:50.011330426 +0000 UTC m=+1415.582473337" watchObservedRunningTime="2025-12-15 07:14:50.019297708 +0000 UTC m=+1415.590440619" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.036197 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.15330274 podStartE2EDuration="5.036179325s" podCreationTimestamp="2025-12-15 07:14:45 +0000 UTC" firstStartedPulling="2025-12-15 07:14:46.766287795 +0000 UTC m=+1412.337430706" lastFinishedPulling="2025-12-15 07:14:49.64916438 +0000 UTC m=+1415.220307291" observedRunningTime="2025-12-15 07:14:50.028850141 +0000 UTC m=+1415.599993062" watchObservedRunningTime="2025-12-15 07:14:50.036179325 +0000 UTC m=+1415.607322236" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.654415 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.680196 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.840414 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.840482 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:50 crc kubenswrapper[4876]: I1215 07:14:50.901010 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.009703 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerStarted","Data":"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e"} Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.010745 4876 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-metadata-0" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-log" containerID="cri-o://d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" gracePeriod=30 Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.010913 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-metadata" containerID="cri-o://ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" gracePeriod=30 Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.012507 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerStarted","Data":"02ef34f441f26ba22d9ff427cfde8245b7c822cab5bec064cb00680a51445cfc"} Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.043542 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.143392704 podStartE2EDuration="6.043504116s" podCreationTimestamp="2025-12-15 07:14:45 +0000 UTC" firstStartedPulling="2025-12-15 07:14:46.722020931 +0000 UTC m=+1412.293163842" lastFinishedPulling="2025-12-15 07:14:49.622132343 +0000 UTC m=+1415.193275254" observedRunningTime="2025-12-15 07:14:51.033363986 +0000 UTC m=+1416.604506897" watchObservedRunningTime="2025-12-15 07:14:51.043504116 +0000 UTC m=+1416.614647057" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.067942 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.6404913370000003 podStartE2EDuration="7.067885971s" podCreationTimestamp="2025-12-15 07:14:44 +0000 UTC" firstStartedPulling="2025-12-15 07:14:46.19437826 +0000 UTC m=+1411.765521161" lastFinishedPulling="2025-12-15 07:14:49.621772884 +0000 UTC m=+1415.192915795" observedRunningTime="2025-12-15 07:14:51.060515506 +0000 UTC m=+1416.631658417" watchObservedRunningTime="2025-12-15 07:14:51.067885971 +0000 UTC m=+1416.639028882" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.098675 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.154013 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.678263 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.795334 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle\") pod \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.795442 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmbnn\" (UniqueName: \"kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn\") pod \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.796246 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs\") pod \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.796341 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data\") pod \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\" (UID: \"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4\") " Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.796828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs" (OuterVolumeSpecName: "logs") pod "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" (UID: "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.802591 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn" (OuterVolumeSpecName: "kube-api-access-hmbnn") pod "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" (UID: "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4"). InnerVolumeSpecName "kube-api-access-hmbnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.832304 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" (UID: "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.833900 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data" (OuterVolumeSpecName: "config-data") pod "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" (UID: "01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.899293 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.899342 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.899359 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:51 crc kubenswrapper[4876]: I1215 07:14:51.899378 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmbnn\" (UniqueName: \"kubernetes.io/projected/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4-kube-api-access-hmbnn\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.020819 4876 generic.go:334] "Generic (PLEG): container finished" podID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerID="ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" exitCode=0 Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.021208 4876 generic.go:334] "Generic (PLEG): container finished" podID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerID="d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" exitCode=143 Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.020866 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.020886 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerDied","Data":"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e"} Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.021448 4876 scope.go:117] "RemoveContainer" containerID="ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.022674 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerDied","Data":"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a"} Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.022718 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4","Type":"ContainerDied","Data":"24d45ac9ade612ec13981bc8f1045ed00d9d213d9975d12d4184bbcec961f920"} Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.052501 4876 scope.go:117] "RemoveContainer" containerID="d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.053721 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.061489 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.079834 4876 scope.go:117] "RemoveContainer" containerID="ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" Dec 15 
07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.079918 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:52 crc kubenswrapper[4876]: E1215 07:14:52.080309 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-metadata" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080332 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-metadata" Dec 15 07:14:52 crc kubenswrapper[4876]: E1215 07:14:52.080344 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-log" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080351 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-log" Dec 15 07:14:52 crc kubenswrapper[4876]: E1215 07:14:52.080475 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e\": container with ID starting with ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e not found: ID does not exist" containerID="ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080512 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-log" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080528 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" containerName="nova-metadata-metadata" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080515 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e"} err="failed to get container status \"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e\": rpc error: code = NotFound desc = could not find container \"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e\": container with ID starting with ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e not found: ID does not exist" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.080546 4876 scope.go:117] "RemoveContainer" containerID="d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" Dec 15 07:14:52 crc kubenswrapper[4876]: E1215 07:14:52.081032 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a\": container with ID starting with d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a not found: ID does not exist" containerID="d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.081057 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a"} err="failed to get container status \"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a\": rpc error: code = NotFound desc = could not find container \"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a\": container with ID 
starting with d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a not found: ID does not exist" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.081077 4876 scope.go:117] "RemoveContainer" containerID="ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.081317 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e"} err="failed to get container status \"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e\": rpc error: code = NotFound desc = could not find container \"ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e\": container with ID starting with ef2fab90249563dba3ae6b1bda19c5b1e1af796b10cc919de8cabcd0148b9c6e not found: ID does not exist" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.081339 4876 scope.go:117] "RemoveContainer" containerID="d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.081649 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.082577 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a"} err="failed to get container status \"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a\": rpc error: code = NotFound desc = could not find container \"d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a\": container with ID starting with d1ab8b737ef75b9ccb52114c005cbf2f591bc6616ece1cbffa980f69bb791b5a not found: ID does not exist" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.087322 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.087939 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.090974 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.213445 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcsw8\" (UniqueName: \"kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.213488 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.213880 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.213996 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.214027 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.315711 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.315782 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcsw8\" (UniqueName: \"kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.315811 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.315938 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.315988 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.316505 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.320160 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.323062 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " 
pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.323707 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.337147 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcsw8\" (UniqueName: \"kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8\") pod \"nova-metadata-0\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.402584 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.719495 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4" path="/var/lib/kubelet/pods/01c7fc05-bef5-45e3-9b3e-aa1ca06c4de4/volumes" Dec 15 07:14:52 crc kubenswrapper[4876]: I1215 07:14:52.915091 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:52 crc kubenswrapper[4876]: W1215 07:14:52.922328 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c9a4f28_06df_4a38_8b65_40aa46ec0340.slice/crio-1034d14ece55b67d118af4d9312d0d0397637e8e1a87fd204744fdf1acc1398a WatchSource:0}: Error finding container 1034d14ece55b67d118af4d9312d0d0397637e8e1a87fd204744fdf1acc1398a: Status 404 returned error can't find the container with id 1034d14ece55b67d118af4d9312d0d0397637e8e1a87fd204744fdf1acc1398a Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.030497 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerStarted","Data":"1034d14ece55b67d118af4d9312d0d0397637e8e1a87fd204744fdf1acc1398a"} Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.030643 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lnhph" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="registry-server" containerID="cri-o://47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381" gracePeriod=2 Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.453651 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.542542 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b9df\" (UniqueName: \"kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df\") pod \"12105428-dcb5-404f-91d8-db10b102c067\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.542892 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content\") pod \"12105428-dcb5-404f-91d8-db10b102c067\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.542942 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities\") pod \"12105428-dcb5-404f-91d8-db10b102c067\" (UID: \"12105428-dcb5-404f-91d8-db10b102c067\") " Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.543788 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities" (OuterVolumeSpecName: "utilities") pod "12105428-dcb5-404f-91d8-db10b102c067" (UID: "12105428-dcb5-404f-91d8-db10b102c067"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.546502 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df" (OuterVolumeSpecName: "kube-api-access-8b9df") pod "12105428-dcb5-404f-91d8-db10b102c067" (UID: "12105428-dcb5-404f-91d8-db10b102c067"). InnerVolumeSpecName "kube-api-access-8b9df". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.561710 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12105428-dcb5-404f-91d8-db10b102c067" (UID: "12105428-dcb5-404f-91d8-db10b102c067"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.645423 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b9df\" (UniqueName: \"kubernetes.io/projected/12105428-dcb5-404f-91d8-db10b102c067-kube-api-access-8b9df\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.645680 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:53 crc kubenswrapper[4876]: I1215 07:14:53.645802 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12105428-dcb5-404f-91d8-db10b102c067-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.038784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerStarted","Data":"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7"} Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.038829 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerStarted","Data":"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899"} Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.041263 4876 generic.go:334] "Generic (PLEG): container finished" podID="12105428-dcb5-404f-91d8-db10b102c067" containerID="47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381" exitCode=0 Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.041297 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerDied","Data":"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381"} Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.041317 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lnhph" event={"ID":"12105428-dcb5-404f-91d8-db10b102c067","Type":"ContainerDied","Data":"1c1c9d9859c6bf88bce69b45da9ba79035110bd52d5b4d744998ad9d3d864ab6"} Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.041337 4876 scope.go:117] "RemoveContainer" containerID="47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.041506 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lnhph" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.070473 4876 scope.go:117] "RemoveContainer" containerID="279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.077042 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.077016282 podStartE2EDuration="2.077016282s" podCreationTimestamp="2025-12-15 07:14:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:54.058806629 +0000 UTC m=+1419.629949560" watchObservedRunningTime="2025-12-15 07:14:54.077016282 +0000 UTC m=+1419.648159193" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.107529 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.110309 4876 scope.go:117] "RemoveContainer" containerID="c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.119739 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lnhph"] Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.152374 4876 scope.go:117] "RemoveContainer" containerID="47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381" Dec 15 07:14:54 crc kubenswrapper[4876]: E1215 07:14:54.152808 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381\": container with ID starting with 47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381 not found: ID does not exist" containerID="47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.152866 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381"} err="failed to get container status \"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381\": rpc error: code = NotFound desc = could not find container \"47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381\": container with ID starting with 47291ccba48bb0b1396b0eddb275385bf6f91c92843c941fb9b54ea97893d381 not found: ID does not exist" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.152901 4876 scope.go:117] "RemoveContainer" containerID="279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466" Dec 15 07:14:54 crc kubenswrapper[4876]: E1215 07:14:54.153357 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466\": container with ID starting with 279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466 not found: ID does not exist" containerID="279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.153398 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466"} err="failed to get container status \"279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466\": 
rpc error: code = NotFound desc = could not find container \"279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466\": container with ID starting with 279fdf079cd0c6764b15d8c50eeff9ec76145d1623e34d75ff63d1c6af41a466 not found: ID does not exist" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.153425 4876 scope.go:117] "RemoveContainer" containerID="c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7" Dec 15 07:14:54 crc kubenswrapper[4876]: E1215 07:14:54.153673 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7\": container with ID starting with c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7 not found: ID does not exist" containerID="c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.153698 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7"} err="failed to get container status \"c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7\": rpc error: code = NotFound desc = could not find container \"c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7\": container with ID starting with c804a8060834160cdc7ad331c50d15aa3c4938d5b34f413115ffe953f8f6b7b7 not found: ID does not exist" Dec 15 07:14:54 crc kubenswrapper[4876]: I1215 07:14:54.724958 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12105428-dcb5-404f-91d8-db10b102c067" path="/var/lib/kubelet/pods/12105428-dcb5-404f-91d8-db10b102c067/volumes" Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.069505 4876 generic.go:334] "Generic (PLEG): container finished" podID="c7cb8571-8a9d-469b-af65-6dfd59cafeab" containerID="9358846d442c82631bff2771fc2bcff89a0df61ba8e8aa255a9bf5adbd4fec9c" exitCode=0 Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.069638 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lld2z" event={"ID":"c7cb8571-8a9d-469b-af65-6dfd59cafeab","Type":"ContainerDied","Data":"9358846d442c82631bff2771fc2bcff89a0df61ba8e8aa255a9bf5adbd4fec9c"} Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.076074 4876 generic.go:334] "Generic (PLEG): container finished" podID="61240875-a8dd-44c2-9082-f9270de53161" containerID="266b115ff9869144a93e2d16d75e5aa44cf4050ac7af88dd9b06571e1851f9c3" exitCode=0 Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.076164 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" event={"ID":"61240875-a8dd-44c2-9082-f9270de53161","Type":"ContainerDied","Data":"266b115ff9869144a93e2d16d75e5aa44cf4050ac7af88dd9b06571e1851f9c3"} Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.320374 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.320692 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.564455 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.653980 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.654290 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="dnsmasq-dns" containerID="cri-o://f91bf6d15bf58d2343f264ef0f114bf34a71722864c55ea9bb590968e756e882" gracePeriod=10 Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.654304 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 07:14:55 crc kubenswrapper[4876]: I1215 07:14:55.706770 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.096172 4876 generic.go:334] "Generic (PLEG): container finished" podID="3224887d-8b3b-4228-814d-53e35a24b517" containerID="f91bf6d15bf58d2343f264ef0f114bf34a71722864c55ea9bb590968e756e882" exitCode=0 Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.096259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" event={"ID":"3224887d-8b3b-4228-814d-53e35a24b517","Type":"ContainerDied","Data":"f91bf6d15bf58d2343f264ef0f114bf34a71722864c55ea9bb590968e756e882"} Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.153702 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.214209 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.328778 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.328845 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.328908 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.328951 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.328976 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smdmn\" (UniqueName: \"kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.329051 4876 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc\") pod \"3224887d-8b3b-4228-814d-53e35a24b517\" (UID: \"3224887d-8b3b-4228-814d-53e35a24b517\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.353333 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn" (OuterVolumeSpecName: "kube-api-access-smdmn") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "kube-api-access-smdmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.409859 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.409950 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.421134 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config" (OuterVolumeSpecName: "config") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.423794 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.431379 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.431418 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smdmn\" (UniqueName: \"kubernetes.io/projected/3224887d-8b3b-4228-814d-53e35a24b517-kube-api-access-smdmn\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.455750 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.483404 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.515915 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.539040 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.539081 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.552640 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.552936 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3224887d-8b3b-4228-814d-53e35a24b517" (UID: "3224887d-8b3b-4228-814d-53e35a24b517"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.579964 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.639951 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle\") pod \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.640794 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts\") pod \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.642193 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data\") pod \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.642342 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6vvs\" (UniqueName: \"kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs\") pod \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\" (UID: \"c7cb8571-8a9d-469b-af65-6dfd59cafeab\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.643012 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc 
kubenswrapper[4876]: I1215 07:14:56.643127 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3224887d-8b3b-4228-814d-53e35a24b517-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.645219 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts" (OuterVolumeSpecName: "scripts") pod "c7cb8571-8a9d-469b-af65-6dfd59cafeab" (UID: "c7cb8571-8a9d-469b-af65-6dfd59cafeab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.648433 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs" (OuterVolumeSpecName: "kube-api-access-s6vvs") pod "c7cb8571-8a9d-469b-af65-6dfd59cafeab" (UID: "c7cb8571-8a9d-469b-af65-6dfd59cafeab"). InnerVolumeSpecName "kube-api-access-s6vvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.680176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data" (OuterVolumeSpecName: "config-data") pod "c7cb8571-8a9d-469b-af65-6dfd59cafeab" (UID: "c7cb8571-8a9d-469b-af65-6dfd59cafeab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.681934 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7cb8571-8a9d-469b-af65-6dfd59cafeab" (UID: "c7cb8571-8a9d-469b-af65-6dfd59cafeab"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.745029 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data\") pod \"61240875-a8dd-44c2-9082-f9270de53161\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.745601 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle\") pod \"61240875-a8dd-44c2-9082-f9270de53161\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.745737 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts\") pod \"61240875-a8dd-44c2-9082-f9270de53161\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.745924 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bksd\" (UniqueName: \"kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd\") pod \"61240875-a8dd-44c2-9082-f9270de53161\" (UID: \"61240875-a8dd-44c2-9082-f9270de53161\") " Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.746561 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.746643 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6vvs\" (UniqueName: \"kubernetes.io/projected/c7cb8571-8a9d-469b-af65-6dfd59cafeab-kube-api-access-s6vvs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.746759 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.746833 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7cb8571-8a9d-469b-af65-6dfd59cafeab-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.762364 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts" (OuterVolumeSpecName: "scripts") pod "61240875-a8dd-44c2-9082-f9270de53161" (UID: "61240875-a8dd-44c2-9082-f9270de53161"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.775558 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd" (OuterVolumeSpecName: "kube-api-access-7bksd") pod "61240875-a8dd-44c2-9082-f9270de53161" (UID: "61240875-a8dd-44c2-9082-f9270de53161"). InnerVolumeSpecName "kube-api-access-7bksd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.775736 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data" (OuterVolumeSpecName: "config-data") pod "61240875-a8dd-44c2-9082-f9270de53161" (UID: "61240875-a8dd-44c2-9082-f9270de53161"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.778566 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61240875-a8dd-44c2-9082-f9270de53161" (UID: "61240875-a8dd-44c2-9082-f9270de53161"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.849698 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.849763 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.849783 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61240875-a8dd-44c2-9082-f9270de53161-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:56 crc kubenswrapper[4876]: I1215 07:14:56.849800 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bksd\" (UniqueName: \"kubernetes.io/projected/61240875-a8dd-44c2-9082-f9270de53161-kube-api-access-7bksd\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.108395 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" event={"ID":"61240875-a8dd-44c2-9082-f9270de53161","Type":"ContainerDied","Data":"f0e096aec0c5fcb0cc07c9ed512c4a038f7036ea724211e72f73c452601b8c6e"} Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.108441 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0e096aec0c5fcb0cc07c9ed512c4a038f7036ea724211e72f73c452601b8c6e" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.108504 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bgdfk" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.111293 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-lld2z" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.111289 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-lld2z" event={"ID":"c7cb8571-8a9d-469b-af65-6dfd59cafeab","Type":"ContainerDied","Data":"d2b9e065f51aca224efc6a50828952bb0a9c687c46258da8c7fea37910480614"} Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.111446 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2b9e065f51aca224efc6a50828952bb0a9c687c46258da8c7fea37910480614" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.125430 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.125494 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75bfc9b94f-sh4l9" event={"ID":"3224887d-8b3b-4228-814d-53e35a24b517","Type":"ContainerDied","Data":"093151b45fdd74ae41d6e69182eede76d59b42e63e156f04e14a8be292885f92"} Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.125555 4876 scope.go:117] "RemoveContainer" containerID="f91bf6d15bf58d2343f264ef0f114bf34a71722864c55ea9bb590968e756e882" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.169289 4876 scope.go:117] "RemoveContainer" containerID="435b67555b9c61685a3d9d0b17db2238a2413690482fe86306938f6d37e1d6fd" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.178320 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.216883 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75bfc9b94f-sh4l9"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.241683 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242063 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="init" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242086 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="init" Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242113 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="extract-utilities" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242123 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="extract-utilities" Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242134 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="registry-server" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242140 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="registry-server" Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242161 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="extract-content" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242167 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="extract-content" Dec 15 07:14:57 crc 
kubenswrapper[4876]: E1215 07:14:57.242178 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7cb8571-8a9d-469b-af65-6dfd59cafeab" containerName="nova-manage" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242184 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7cb8571-8a9d-469b-af65-6dfd59cafeab" containerName="nova-manage" Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242200 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="dnsmasq-dns" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242207 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="dnsmasq-dns" Dec 15 07:14:57 crc kubenswrapper[4876]: E1215 07:14:57.242221 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61240875-a8dd-44c2-9082-f9270de53161" containerName="nova-cell1-conductor-db-sync" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242228 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="61240875-a8dd-44c2-9082-f9270de53161" containerName="nova-cell1-conductor-db-sync" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242378 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="61240875-a8dd-44c2-9082-f9270de53161" containerName="nova-cell1-conductor-db-sync" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242402 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7cb8571-8a9d-469b-af65-6dfd59cafeab" containerName="nova-manage" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242412 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3224887d-8b3b-4228-814d-53e35a24b517" containerName="dnsmasq-dns" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242424 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="12105428-dcb5-404f-91d8-db10b102c067" containerName="registry-server" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.242924 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.243011 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.246647 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.304303 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.305289 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-log" containerID="cri-o://6cc63ffbc17cf06b8d4cfe4ff0c86de11bdeacd322b606d8def30aa2fc2fd3bb" gracePeriod=30 Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.305458 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-api" containerID="cri-o://02ef34f441f26ba22d9ff427cfde8245b7c822cab5bec064cb00680a51445cfc" gracePeriod=30 Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.332283 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.333436 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-log" containerID="cri-o://e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" gracePeriod=30 Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.333964 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-metadata" containerID="cri-o://d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" gracePeriod=30 Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.345900 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.361244 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh98q\" (UniqueName: \"kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.361373 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.361396 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.404123 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.404187 4876 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.463874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh98q\" (UniqueName: \"kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.464082 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.464149 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.469212 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.473597 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.492434 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh98q\" (UniqueName: \"kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q\") pod \"nova-cell1-conductor-0\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:57 crc kubenswrapper[4876]: I1215 07:14:57.563793 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.081290 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.146945 4876 generic.go:334] "Generic (PLEG): container finished" podID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerID="d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" exitCode=0 Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.146985 4876 generic.go:334] "Generic (PLEG): container finished" podID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerID="e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" exitCode=143 Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.147062 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.147070 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerDied","Data":"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7"} Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.147144 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerDied","Data":"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899"} Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.147159 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3c9a4f28-06df-4a38-8b65-40aa46ec0340","Type":"ContainerDied","Data":"1034d14ece55b67d118af4d9312d0d0397637e8e1a87fd204744fdf1acc1398a"} Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.147182 4876 scope.go:117] "RemoveContainer" containerID="d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.157055 4876 generic.go:334] "Generic (PLEG): container finished" podID="610c73db-685d-4269-8965-0605b4c2c007" containerID="6cc63ffbc17cf06b8d4cfe4ff0c86de11bdeacd322b606d8def30aa2fc2fd3bb" exitCode=143 Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.157157 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerDied","Data":"6cc63ffbc17cf06b8d4cfe4ff0c86de11bdeacd322b606d8def30aa2fc2fd3bb"} Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.164894 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerName="nova-scheduler-scheduler" containerID="cri-o://c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" gracePeriod=30 Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.177353 4876 scope.go:117] "RemoveContainer" containerID="e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.181319 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs\") pod \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.181386 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data\") pod \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.181509 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle\") pod \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.181566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcsw8\" (UniqueName: 
\"kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8\") pod \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.181612 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs\") pod \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\" (UID: \"3c9a4f28-06df-4a38-8b65-40aa46ec0340\") " Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.182917 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs" (OuterVolumeSpecName: "logs") pod "3c9a4f28-06df-4a38-8b65-40aa46ec0340" (UID: "3c9a4f28-06df-4a38-8b65-40aa46ec0340"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.190409 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8" (OuterVolumeSpecName: "kube-api-access-dcsw8") pod "3c9a4f28-06df-4a38-8b65-40aa46ec0340" (UID: "3c9a4f28-06df-4a38-8b65-40aa46ec0340"). InnerVolumeSpecName "kube-api-access-dcsw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.208999 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.221198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c9a4f28-06df-4a38-8b65-40aa46ec0340" (UID: "3c9a4f28-06df-4a38-8b65-40aa46ec0340"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.241201 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3c9a4f28-06df-4a38-8b65-40aa46ec0340" (UID: "3c9a4f28-06df-4a38-8b65-40aa46ec0340"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.244277 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data" (OuterVolumeSpecName: "config-data") pod "3c9a4f28-06df-4a38-8b65-40aa46ec0340" (UID: "3c9a4f28-06df-4a38-8b65-40aa46ec0340"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.283454 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c9a4f28-06df-4a38-8b65-40aa46ec0340-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.283491 4876 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.283503 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.283511 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c9a4f28-06df-4a38-8b65-40aa46ec0340-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.283541 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcsw8\" (UniqueName: \"kubernetes.io/projected/3c9a4f28-06df-4a38-8b65-40aa46ec0340-kube-api-access-dcsw8\") on node \"crc\" DevicePath \"\"" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.303292 4876 scope.go:117] "RemoveContainer" containerID="d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" Dec 15 07:14:58 crc kubenswrapper[4876]: E1215 07:14:58.304138 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7\": container with ID starting with d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7 not found: ID does not exist" containerID="d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.304181 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7"} err="failed to get container status \"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7\": rpc error: code = NotFound desc = could not find container \"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7\": container with ID starting with d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7 not found: ID does not exist" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.304214 4876 scope.go:117] "RemoveContainer" containerID="e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" Dec 15 07:14:58 crc kubenswrapper[4876]: E1215 07:14:58.304467 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899\": container with ID starting with e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899 not found: ID does not exist" containerID="e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.304558 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899"} err="failed to get container status 
\"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899\": rpc error: code = NotFound desc = could not find container \"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899\": container with ID starting with e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899 not found: ID does not exist" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.304591 4876 scope.go:117] "RemoveContainer" containerID="d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.305090 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7"} err="failed to get container status \"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7\": rpc error: code = NotFound desc = could not find container \"d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7\": container with ID starting with d6ca371bdc3c6ef756f73a6f8b6fbeef210fe9350fc700a4aab53ffbbfd449b7 not found: ID does not exist" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.305154 4876 scope.go:117] "RemoveContainer" containerID="e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.305569 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899"} err="failed to get container status \"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899\": rpc error: code = NotFound desc = could not find container \"e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899\": container with ID starting with e08189480fb9a3863f0adb41d9767083f27284071698cfc0f3ed069059522899 not found: ID does not exist" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.484742 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.493882 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.506197 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:58 crc kubenswrapper[4876]: E1215 07:14:58.506700 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-log" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.506731 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-log" Dec 15 07:14:58 crc kubenswrapper[4876]: E1215 07:14:58.506753 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-metadata" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.506763 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-metadata" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.507011 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-metadata" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.507034 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" containerName="nova-metadata-log" Dec 15 
07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.508167 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.510258 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.510717 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.520669 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.589312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.589376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.589441 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.589471 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.589680 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqg5c\" (UniqueName: \"kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.691811 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.691891 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.691937 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqg5c\" (UniqueName: 
\"kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.692073 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.692129 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.692933 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.696505 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.696661 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.696659 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.718261 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3224887d-8b3b-4228-814d-53e35a24b517" path="/var/lib/kubelet/pods/3224887d-8b3b-4228-814d-53e35a24b517/volumes" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.719676 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c9a4f28-06df-4a38-8b65-40aa46ec0340" path="/var/lib/kubelet/pods/3c9a4f28-06df-4a38-8b65-40aa46ec0340/volumes" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.725403 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqg5c\" (UniqueName: \"kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c\") pod \"nova-metadata-0\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " pod="openstack/nova-metadata-0" Dec 15 07:14:58 crc kubenswrapper[4876]: I1215 07:14:58.828901 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:14:59 crc kubenswrapper[4876]: I1215 07:14:59.177330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2","Type":"ContainerStarted","Data":"7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233"} Dec 15 07:14:59 crc kubenswrapper[4876]: I1215 07:14:59.177613 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2","Type":"ContainerStarted","Data":"e6e5150701fcca7fbae0b016e8dcc307c5c0db80ec9e85b4e9bc18b48616403c"} Dec 15 07:14:59 crc kubenswrapper[4876]: I1215 07:14:59.177660 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 15 07:14:59 crc kubenswrapper[4876]: I1215 07:14:59.196633 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.196611283 podStartE2EDuration="2.196611283s" podCreationTimestamp="2025-12-15 07:14:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:14:59.193359016 +0000 UTC m=+1424.764501947" watchObservedRunningTime="2025-12-15 07:14:59.196611283 +0000 UTC m=+1424.767754194" Dec 15 07:14:59 crc kubenswrapper[4876]: I1215 07:14:59.285200 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:14:59 crc kubenswrapper[4876]: W1215 07:14:59.290832 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50416e26_c694_47d8_a4af_109bf57f6f36.slice/crio-2c646bc4d65cf229e3d6d0a3b32d18229b4f2d6e9b02c602a03928aacf0ed9e8 WatchSource:0}: Error finding container 2c646bc4d65cf229e3d6d0a3b32d18229b4f2d6e9b02c602a03928aacf0ed9e8: Status 404 returned error can't find the container with id 2c646bc4d65cf229e3d6d0a3b32d18229b4f2d6e9b02c602a03928aacf0ed9e8 Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.149546 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb"] Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.151146 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.154638 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.156964 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.162501 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb"] Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.189450 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerStarted","Data":"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e"} Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.189522 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerStarted","Data":"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590"} Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.189538 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerStarted","Data":"2c646bc4d65cf229e3d6d0a3b32d18229b4f2d6e9b02c602a03928aacf0ed9e8"} Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.223572 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.223549279 podStartE2EDuration="2.223549279s" podCreationTimestamp="2025-12-15 07:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:00.214355822 +0000 UTC m=+1425.785498743" watchObservedRunningTime="2025-12-15 07:15:00.223549279 +0000 UTC m=+1425.794692190" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.330004 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.330078 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcmms\" (UniqueName: \"kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.330246 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.432592 
4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.433035 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcmms\" (UniqueName: \"kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.433249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.434523 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.442558 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.452079 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcmms\" (UniqueName: \"kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms\") pod \"collect-profiles-29429715-qn2tb\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.473404 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:00 crc kubenswrapper[4876]: E1215 07:15:00.658447 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:15:00 crc kubenswrapper[4876]: E1215 07:15:00.660903 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:15:00 crc kubenswrapper[4876]: E1215 07:15:00.662758 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:15:00 crc kubenswrapper[4876]: E1215 07:15:00.662831 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerName="nova-scheduler-scheduler" Dec 15 07:15:00 crc kubenswrapper[4876]: I1215 07:15:00.925529 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb"] Dec 15 07:15:00 crc kubenswrapper[4876]: W1215 07:15:00.929766 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd59f4ef2_8822_4ce4_8b8f_5f2b3b75463a.slice/crio-f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77 WatchSource:0}: Error finding container f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77: Status 404 returned error can't find the container with id f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77 Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.199538 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" event={"ID":"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a","Type":"ContainerStarted","Data":"78e4aa3572d08fd3a46a9cee1a0093bf6fe2a0c703edf0a99556aaca1cc65182"} Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.199933 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" event={"ID":"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a","Type":"ContainerStarted","Data":"f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77"} Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.221971 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" podStartSLOduration=1.221954093 podStartE2EDuration="1.221954093s" podCreationTimestamp="2025-12-15 07:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 
07:15:01.218004617 +0000 UTC m=+1426.789147518" watchObservedRunningTime="2025-12-15 07:15:01.221954093 +0000 UTC m=+1426.793097004" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.331369 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.331744 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="2860f415-69bd-417c-a89d-00515806360b" containerName="kube-state-metrics" containerID="cri-o://1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4" gracePeriod=30 Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.695457 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.749540 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.867855 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle\") pod \"1252b5d2-4d38-4d18-8614-70ec63b93b94\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.867921 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87vhp\" (UniqueName: \"kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp\") pod \"2860f415-69bd-417c-a89d-00515806360b\" (UID: \"2860f415-69bd-417c-a89d-00515806360b\") " Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.868013 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data\") pod \"1252b5d2-4d38-4d18-8614-70ec63b93b94\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.868111 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgh4j\" (UniqueName: \"kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j\") pod \"1252b5d2-4d38-4d18-8614-70ec63b93b94\" (UID: \"1252b5d2-4d38-4d18-8614-70ec63b93b94\") " Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.873921 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp" (OuterVolumeSpecName: "kube-api-access-87vhp") pod "2860f415-69bd-417c-a89d-00515806360b" (UID: "2860f415-69bd-417c-a89d-00515806360b"). InnerVolumeSpecName "kube-api-access-87vhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.874303 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j" (OuterVolumeSpecName: "kube-api-access-lgh4j") pod "1252b5d2-4d38-4d18-8614-70ec63b93b94" (UID: "1252b5d2-4d38-4d18-8614-70ec63b93b94"). InnerVolumeSpecName "kube-api-access-lgh4j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.906452 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1252b5d2-4d38-4d18-8614-70ec63b93b94" (UID: "1252b5d2-4d38-4d18-8614-70ec63b93b94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.914185 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data" (OuterVolumeSpecName: "config-data") pod "1252b5d2-4d38-4d18-8614-70ec63b93b94" (UID: "1252b5d2-4d38-4d18-8614-70ec63b93b94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.974452 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgh4j\" (UniqueName: \"kubernetes.io/projected/1252b5d2-4d38-4d18-8614-70ec63b93b94-kube-api-access-lgh4j\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.974490 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.974501 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87vhp\" (UniqueName: \"kubernetes.io/projected/2860f415-69bd-417c-a89d-00515806360b-kube-api-access-87vhp\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:01 crc kubenswrapper[4876]: I1215 07:15:01.974513 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1252b5d2-4d38-4d18-8614-70ec63b93b94-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.213587 4876 generic.go:334] "Generic (PLEG): container finished" podID="2860f415-69bd-417c-a89d-00515806360b" containerID="1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4" exitCode=2 Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.213657 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2860f415-69bd-417c-a89d-00515806360b","Type":"ContainerDied","Data":"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4"} Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.213669 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.213687 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"2860f415-69bd-417c-a89d-00515806360b","Type":"ContainerDied","Data":"218fbf7bf49be07d8362991d22a4df5766923edbf73e3d27104c3fa7937c7944"} Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.213705 4876 scope.go:117] "RemoveContainer" containerID="1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.215883 4876 generic.go:334] "Generic (PLEG): container finished" podID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" exitCode=0 Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.215940 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1252b5d2-4d38-4d18-8614-70ec63b93b94","Type":"ContainerDied","Data":"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d"} Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.215963 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1252b5d2-4d38-4d18-8614-70ec63b93b94","Type":"ContainerDied","Data":"60d6e85c1c4890832bfc9472c5eb43d4c04be81710c984235d89b93063cba4ec"} Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.216006 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.218749 4876 generic.go:334] "Generic (PLEG): container finished" podID="d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" containerID="78e4aa3572d08fd3a46a9cee1a0093bf6fe2a0c703edf0a99556aaca1cc65182" exitCode=0 Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.218810 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" event={"ID":"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a","Type":"ContainerDied","Data":"78e4aa3572d08fd3a46a9cee1a0093bf6fe2a0c703edf0a99556aaca1cc65182"} Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.254681 4876 scope.go:117] "RemoveContainer" containerID="1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4" Dec 15 07:15:02 crc kubenswrapper[4876]: E1215 07:15:02.256185 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4\": container with ID starting with 1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4 not found: ID does not exist" containerID="1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.256261 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4"} err="failed to get container status \"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4\": rpc error: code = NotFound desc = could not find container \"1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4\": container with ID starting with 1f8f9a75490fc6b928f4fb0020329973b2dd8e1f77f5e7e9865f07bfbe8999a4 not found: ID does not exist" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.256308 4876 scope.go:117] "RemoveContainer" 
containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.276245 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.294478 4876 scope.go:117] "RemoveContainer" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" Dec 15 07:15:02 crc kubenswrapper[4876]: E1215 07:15:02.295133 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d\": container with ID starting with c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d not found: ID does not exist" containerID="c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.295185 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d"} err="failed to get container status \"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d\": rpc error: code = NotFound desc = could not find container \"c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d\": container with ID starting with c9a9d9346b5d860f76bf15bc117e1327449a45b322202779da7ed6563026a90d not found: ID does not exist" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.297973 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.327480 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.338565 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.353700 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: E1215 07:15:02.354336 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2860f415-69bd-417c-a89d-00515806360b" containerName="kube-state-metrics" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.354361 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2860f415-69bd-417c-a89d-00515806360b" containerName="kube-state-metrics" Dec 15 07:15:02 crc kubenswrapper[4876]: E1215 07:15:02.354412 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerName="nova-scheduler-scheduler" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.354424 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerName="nova-scheduler-scheduler" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.354690 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" containerName="nova-scheduler-scheduler" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.354714 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2860f415-69bd-417c-a89d-00515806360b" containerName="kube-state-metrics" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.355645 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.358441 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.358626 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.365581 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.367550 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.372453 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.379008 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.392397 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.491820 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.491899 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhxjn\" (UniqueName: \"kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.491928 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.491979 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbtzd\" (UniqueName: \"kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.492034 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.492067 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") 
" pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.492097 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.593353 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.593677 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.593816 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhxjn\" (UniqueName: \"kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.593930 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.594057 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbtzd\" (UniqueName: \"kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.594230 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.594381 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.598972 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.599855 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.602647 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.608550 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.610227 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.612625 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhxjn\" (UniqueName: \"kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn\") pod \"nova-scheduler-0\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.613911 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbtzd\" (UniqueName: \"kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd\") pod \"kube-state-metrics-0\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.684515 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.698134 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.717239 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1252b5d2-4d38-4d18-8614-70ec63b93b94" path="/var/lib/kubelet/pods/1252b5d2-4d38-4d18-8614-70ec63b93b94/volumes" Dec 15 07:15:02 crc kubenswrapper[4876]: I1215 07:15:02.717924 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2860f415-69bd-417c-a89d-00515806360b" path="/var/lib/kubelet/pods/2860f415-69bd-417c-a89d-00515806360b/volumes" Dec 15 07:15:03 crc kubenswrapper[4876]: W1215 07:15:03.200719 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39f17917_e5e7_4c0b_bddb_d7ce2fbc71d1.slice/crio-56a4d3eb557314a531f157c23561cd2f095d0d9f484d0172a4eeb015f9fd4a5d WatchSource:0}: Error finding container 56a4d3eb557314a531f157c23561cd2f095d0d9f484d0172a4eeb015f9fd4a5d: Status 404 returned error can't find the container with id 56a4d3eb557314a531f157c23561cd2f095d0d9f484d0172a4eeb015f9fd4a5d Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.201896 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.247361 4876 generic.go:334] "Generic (PLEG): container finished" podID="610c73db-685d-4269-8965-0605b4c2c007" containerID="02ef34f441f26ba22d9ff427cfde8245b7c822cab5bec064cb00680a51445cfc" exitCode=0 Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.247431 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerDied","Data":"02ef34f441f26ba22d9ff427cfde8245b7c822cab5bec064cb00680a51445cfc"} Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.252675 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1","Type":"ContainerStarted","Data":"56a4d3eb557314a531f157c23561cd2f095d0d9f484d0172a4eeb015f9fd4a5d"} Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.314657 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.315690 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-central-agent" containerID="cri-o://ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7" gracePeriod=30 Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.315800 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="proxy-httpd" containerID="cri-o://42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192" gracePeriod=30 Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.315831 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="sg-core" containerID="cri-o://67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d" gracePeriod=30 Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.315836 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" 
containerName="ceilometer-notification-agent" containerID="cri-o://7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661" gracePeriod=30 Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.344772 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.434968 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.525876 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs\") pod \"610c73db-685d-4269-8965-0605b4c2c007\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.525919 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle\") pod \"610c73db-685d-4269-8965-0605b4c2c007\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.525985 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvbhk\" (UniqueName: \"kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk\") pod \"610c73db-685d-4269-8965-0605b4c2c007\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.526013 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data\") pod \"610c73db-685d-4269-8965-0605b4c2c007\" (UID: \"610c73db-685d-4269-8965-0605b4c2c007\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.526772 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs" (OuterVolumeSpecName: "logs") pod "610c73db-685d-4269-8965-0605b4c2c007" (UID: "610c73db-685d-4269-8965-0605b4c2c007"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.532531 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk" (OuterVolumeSpecName: "kube-api-access-rvbhk") pod "610c73db-685d-4269-8965-0605b4c2c007" (UID: "610c73db-685d-4269-8965-0605b4c2c007"). InnerVolumeSpecName "kube-api-access-rvbhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.567127 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "610c73db-685d-4269-8965-0605b4c2c007" (UID: "610c73db-685d-4269-8965-0605b4c2c007"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.570124 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data" (OuterVolumeSpecName: "config-data") pod "610c73db-685d-4269-8965-0605b4c2c007" (UID: "610c73db-685d-4269-8965-0605b4c2c007"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.592017 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.632388 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/610c73db-685d-4269-8965-0605b4c2c007-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.632427 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.632437 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvbhk\" (UniqueName: \"kubernetes.io/projected/610c73db-685d-4269-8965-0605b4c2c007-kube-api-access-rvbhk\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.632445 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/610c73db-685d-4269-8965-0605b4c2c007-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.734507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume\") pod \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.735021 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume\") pod \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.735179 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcmms\" (UniqueName: \"kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms\") pod \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\" (UID: \"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a\") " Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.735867 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume" (OuterVolumeSpecName: "config-volume") pod "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" (UID: "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.736787 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.740929 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" (UID: "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.741074 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms" (OuterVolumeSpecName: "kube-api-access-tcmms") pod "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" (UID: "d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a"). InnerVolumeSpecName "kube-api-access-tcmms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.828978 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.829456 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.839186 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:03 crc kubenswrapper[4876]: I1215 07:15:03.839542 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcmms\" (UniqueName: \"kubernetes.io/projected/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a-kube-api-access-tcmms\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.266395 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" event={"ID":"d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a","Type":"ContainerDied","Data":"f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.266434 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f26b04619cb037ee67eae71f021415df5bcbb6e2d0ab8b5c616767ca42bd5d77" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.266481 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.274273 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"610c73db-685d-4269-8965-0605b4c2c007","Type":"ContainerDied","Data":"8e5688e77da66d3c3b6f05d9be0ecb8f330111d673a34740a49dae117a678cbc"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.274322 4876 scope.go:117] "RemoveContainer" containerID="02ef34f441f26ba22d9ff427cfde8245b7c822cab5bec064cb00680a51445cfc" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.274422 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285695 4876 generic.go:334] "Generic (PLEG): container finished" podID="13a01103-7d15-4664-b1f9-ced426f919b6" containerID="42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192" exitCode=0 Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285723 4876 generic.go:334] "Generic (PLEG): container finished" podID="13a01103-7d15-4664-b1f9-ced426f919b6" containerID="67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d" exitCode=2 Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285733 4876 generic.go:334] "Generic (PLEG): container finished" podID="13a01103-7d15-4664-b1f9-ced426f919b6" containerID="ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7" exitCode=0 Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285782 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerDied","Data":"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285818 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerDied","Data":"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.285829 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerDied","Data":"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.288900 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"167c4c13-f766-4ecc-8d1c-0ba875acaf9b","Type":"ContainerStarted","Data":"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.288942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"167c4c13-f766-4ecc-8d1c-0ba875acaf9b","Type":"ContainerStarted","Data":"f36d96a36cb1f8bc59060dffadaf81f4f2a88aa6a7b134a327db8d75160d97c5"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.292768 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1","Type":"ContainerStarted","Data":"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c"} Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.293468 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.315795 4876 scope.go:117] "RemoveContainer" containerID="6cc63ffbc17cf06b8d4cfe4ff0c86de11bdeacd322b606d8def30aa2fc2fd3bb" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.325474 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.325436587 podStartE2EDuration="2.325436587s" podCreationTimestamp="2025-12-15 07:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:04.305747218 +0000 UTC m=+1429.876890139" watchObservedRunningTime="2025-12-15 07:15:04.325436587 +0000 UTC m=+1429.896579508" Dec 15 07:15:04 crc 
kubenswrapper[4876]: I1215 07:15:04.339575 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.361285 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.385177 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:04 crc kubenswrapper[4876]: E1215 07:15:04.385701 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" containerName="collect-profiles" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.385727 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" containerName="collect-profiles" Dec 15 07:15:04 crc kubenswrapper[4876]: E1215 07:15:04.385761 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-api" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.385771 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-api" Dec 15 07:15:04 crc kubenswrapper[4876]: E1215 07:15:04.385799 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-log" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.385809 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-log" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.386010 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-log" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.386039 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" containerName="collect-profiles" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.386065 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="610c73db-685d-4269-8965-0605b4c2c007" containerName="nova-api-api" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.387179 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.389034 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.393029 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.929968855 podStartE2EDuration="2.392999241s" podCreationTimestamp="2025-12-15 07:15:02 +0000 UTC" firstStartedPulling="2025-12-15 07:15:03.203444272 +0000 UTC m=+1428.774587203" lastFinishedPulling="2025-12-15 07:15:03.666474678 +0000 UTC m=+1429.237617589" observedRunningTime="2025-12-15 07:15:04.350101468 +0000 UTC m=+1429.921244399" watchObservedRunningTime="2025-12-15 07:15:04.392999241 +0000 UTC m=+1429.964142152" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.414904 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.455725 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlfzw\" (UniqueName: \"kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.455808 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.456054 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.456692 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.558874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.559040 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlfzw\" (UniqueName: \"kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.559097 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc 
kubenswrapper[4876]: I1215 07:15:04.559217 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.559610 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.571395 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.571427 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.579427 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlfzw\" (UniqueName: \"kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw\") pod \"nova-api-0\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.704682 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:04 crc kubenswrapper[4876]: I1215 07:15:04.722109 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="610c73db-685d-4269-8965-0605b4c2c007" path="/var/lib/kubelet/pods/610c73db-685d-4269-8965-0605b4c2c007/volumes" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.142982 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:05 crc kubenswrapper[4876]: W1215 07:15:05.150351 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b4ddac3_9a40_4d3c_8e40_3757e43c4ceb.slice/crio-bf661437a0d06040629e633a84d17297c214d1911d0e77e5168b10145cf66da9 WatchSource:0}: Error finding container bf661437a0d06040629e633a84d17297c214d1911d0e77e5168b10145cf66da9: Status 404 returned error can't find the container with id bf661437a0d06040629e633a84d17297c214d1911d0e77e5168b10145cf66da9 Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.308160 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerStarted","Data":"bf661437a0d06040629e633a84d17297c214d1911d0e77e5168b10145cf66da9"} Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.647146 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706051 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706371 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706553 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc4fq\" (UniqueName: \"kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706587 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706677 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706706 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.706916 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd\") pod \"13a01103-7d15-4664-b1f9-ced426f919b6\" (UID: \"13a01103-7d15-4664-b1f9-ced426f919b6\") " Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.707547 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.707555 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.708841 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.708866 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13a01103-7d15-4664-b1f9-ced426f919b6-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.714329 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts" (OuterVolumeSpecName: "scripts") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.723800 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq" (OuterVolumeSpecName: "kube-api-access-gc4fq") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "kube-api-access-gc4fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.732398 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.802670 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.810883 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.810908 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc4fq\" (UniqueName: \"kubernetes.io/projected/13a01103-7d15-4664-b1f9-ced426f919b6-kube-api-access-gc4fq\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.810920 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.810930 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.831671 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data" (OuterVolumeSpecName: "config-data") pod "13a01103-7d15-4664-b1f9-ced426f919b6" (UID: "13a01103-7d15-4664-b1f9-ced426f919b6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:05 crc kubenswrapper[4876]: I1215 07:15:05.912746 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13a01103-7d15-4664-b1f9-ced426f919b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.318168 4876 generic.go:334] "Generic (PLEG): container finished" podID="13a01103-7d15-4664-b1f9-ced426f919b6" containerID="7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661" exitCode=0 Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.318231 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.318238 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerDied","Data":"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661"} Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.318593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13a01103-7d15-4664-b1f9-ced426f919b6","Type":"ContainerDied","Data":"f801da5e300b3da133c3806346ba5113526c1125ad17edce852cc8fc51b38134"} Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.318636 4876 scope.go:117] "RemoveContainer" containerID="42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.320584 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerStarted","Data":"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3"} Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.320612 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerStarted","Data":"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97"} Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.349409 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.349387725 podStartE2EDuration="2.349387725s" podCreationTimestamp="2025-12-15 07:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:06.342696656 +0000 UTC m=+1431.913839567" watchObservedRunningTime="2025-12-15 07:15:06.349387725 +0000 UTC m=+1431.920530646" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.356411 4876 scope.go:117] "RemoveContainer" containerID="67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.375126 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.386720 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.389052 4876 scope.go:117] "RemoveContainer" containerID="7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400080 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.400597 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-central-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400618 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-central-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.400645 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="proxy-httpd" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400654 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" 
containerName="proxy-httpd" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.400668 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="sg-core" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400676 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="sg-core" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.400694 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-notification-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400702 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-notification-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400923 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-central-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400947 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="sg-core" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400960 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="ceilometer-notification-agent" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.400982 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" containerName="proxy-httpd" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.403207 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.405888 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.406210 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.406329 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.410774 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.444237 4876 scope.go:117] "RemoveContainer" containerID="ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.481080 4876 scope.go:117] "RemoveContainer" containerID="42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.481789 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192\": container with ID starting with 42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192 not found: ID does not exist" containerID="42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.481822 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192"} err="failed to get 
container status \"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192\": rpc error: code = NotFound desc = could not find container \"42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192\": container with ID starting with 42d37c6205c065706b9e48ff4b8ec152063efd0e5a867d94994af1d0bd1c1192 not found: ID does not exist" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.481842 4876 scope.go:117] "RemoveContainer" containerID="67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.482066 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d\": container with ID starting with 67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d not found: ID does not exist" containerID="67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.482090 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d"} err="failed to get container status \"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d\": rpc error: code = NotFound desc = could not find container \"67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d\": container with ID starting with 67ad6b7c40c6c8d797ca4f147ea9ae3ed0c4a21571079bedd09b2718e8cf071d not found: ID does not exist" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.482122 4876 scope.go:117] "RemoveContainer" containerID="7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.482460 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661\": container with ID starting with 7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661 not found: ID does not exist" containerID="7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.482485 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661"} err="failed to get container status \"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661\": rpc error: code = NotFound desc = could not find container \"7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661\": container with ID starting with 7b4f33fc169f2ce5e893ee9715fbd4d9dbc6da229086be903395e1adbb62f661 not found: ID does not exist" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.482498 4876 scope.go:117] "RemoveContainer" containerID="ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7" Dec 15 07:15:06 crc kubenswrapper[4876]: E1215 07:15:06.482749 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7\": container with ID starting with ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7 not found: ID does not exist" containerID="ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.482773 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7"} err="failed to get container status \"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7\": rpc error: code = NotFound desc = could not find container \"ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7\": container with ID starting with ca156ec2d4cbcc16e127938e0f90763190b4f9364884938c29b3995e3f7c74d7 not found: ID does not exist" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533387 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533470 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533510 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533534 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533574 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.533771 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz9b8\" (UniqueName: \"kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc 
kubenswrapper[4876]: I1215 07:15:06.635028 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635192 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635253 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz9b8\" (UniqueName: \"kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635397 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635438 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635467 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635492 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635521 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.635906 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 
07:15:06.639226 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.639525 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.639700 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.641128 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.648483 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.661198 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz9b8\" (UniqueName: \"kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8\") pod \"ceilometer-0\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " pod="openstack/ceilometer-0" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.720019 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13a01103-7d15-4664-b1f9-ced426f919b6" path="/var/lib/kubelet/pods/13a01103-7d15-4664-b1f9-ced426f919b6/volumes" Dec 15 07:15:06 crc kubenswrapper[4876]: I1215 07:15:06.754491 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:07 crc kubenswrapper[4876]: W1215 07:15:07.204149 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabfee997_3c99_4413_b345_2fbcd1db2f93.slice/crio-2d1fc830e592b458ed439339b005961ec2107ef80653b70c2074983959480955 WatchSource:0}: Error finding container 2d1fc830e592b458ed439339b005961ec2107ef80653b70c2074983959480955: Status 404 returned error can't find the container with id 2d1fc830e592b458ed439339b005961ec2107ef80653b70c2074983959480955 Dec 15 07:15:07 crc kubenswrapper[4876]: I1215 07:15:07.208731 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:07 crc kubenswrapper[4876]: I1215 07:15:07.330370 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerStarted","Data":"2d1fc830e592b458ed439339b005961ec2107ef80653b70c2074983959480955"} Dec 15 07:15:07 crc kubenswrapper[4876]: I1215 07:15:07.592494 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 15 07:15:07 crc kubenswrapper[4876]: I1215 07:15:07.699524 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 07:15:08 crc kubenswrapper[4876]: I1215 07:15:08.340432 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerStarted","Data":"d31aed56f79830fc32e7559cde95bf21945913848c9554d07c4af91ff662751c"} Dec 15 07:15:08 crc kubenswrapper[4876]: I1215 07:15:08.830373 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 07:15:08 crc kubenswrapper[4876]: I1215 07:15:08.845751 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 07:15:09 crc kubenswrapper[4876]: I1215 07:15:09.351630 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerStarted","Data":"5f7084336b1200a91fec207a506ecd053c4f05466ef7f3afe1bca3b5f6a0da3c"} Dec 15 07:15:09 crc kubenswrapper[4876]: I1215 07:15:09.870297 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:09 crc kubenswrapper[4876]: I1215 07:15:09.874302 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:10 crc kubenswrapper[4876]: I1215 07:15:10.364512 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerStarted","Data":"a9cd568082b0d01fcd9ac76f7b4607cb411796beff0e07e6290fe3d8607c05b9"} Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.404150 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerStarted","Data":"285b6d623463654e74a0ee227ad1e333fcec467b278a9da3f54ccc9c7f8d59ce"} Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.404780 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.433442 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.45095896 podStartE2EDuration="6.43342258s" podCreationTimestamp="2025-12-15 07:15:06 +0000 UTC" firstStartedPulling="2025-12-15 07:15:07.205978761 +0000 UTC m=+1432.777121672" lastFinishedPulling="2025-12-15 07:15:11.188442391 +0000 UTC m=+1436.759585292" observedRunningTime="2025-12-15 07:15:12.423634137 +0000 UTC m=+1437.994777038" watchObservedRunningTime="2025-12-15 07:15:12.43342258 +0000 UTC m=+1438.004565491" Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.694328 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.699040 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 07:15:12 crc kubenswrapper[4876]: I1215 07:15:12.746802 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 07:15:13 crc kubenswrapper[4876]: I1215 07:15:13.445631 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 07:15:14 crc kubenswrapper[4876]: I1215 07:15:14.714749 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:15:14 crc kubenswrapper[4876]: I1215 07:15:14.742579 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:15:15 crc kubenswrapper[4876]: I1215 07:15:15.746399 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:15 crc kubenswrapper[4876]: I1215 07:15:15.787291 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:18 crc kubenswrapper[4876]: I1215 07:15:18.833407 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 07:15:18 crc kubenswrapper[4876]: I1215 07:15:18.834832 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 07:15:18 crc kubenswrapper[4876]: I1215 07:15:18.842626 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 07:15:18 crc kubenswrapper[4876]: I1215 07:15:18.844511 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.412906 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.483846 4876 generic.go:334] "Generic (PLEG): container finished" podID="de596f2b-465e-4b52-9b25-c41022b105b1" containerID="6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c" exitCode=137 Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.483907 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"de596f2b-465e-4b52-9b25-c41022b105b1","Type":"ContainerDied","Data":"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c"} Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.483967 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.483999 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"de596f2b-465e-4b52-9b25-c41022b105b1","Type":"ContainerDied","Data":"fdfeaada17211114f431ef6ead457d453bf968e970d5abc293f758c7bbc32b83"} Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.484023 4876 scope.go:117] "RemoveContainer" containerID="6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.510315 4876 scope.go:117] "RemoveContainer" containerID="6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c" Dec 15 07:15:20 crc kubenswrapper[4876]: E1215 07:15:20.510957 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c\": container with ID starting with 6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c not found: ID does not exist" containerID="6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.510997 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c"} err="failed to get container status \"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c\": rpc error: code = NotFound desc = could not find container \"6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c\": container with ID starting with 6b1fcec7cb730f55fc223683fbdcdf59c87f6dbedc1c4750553db832d6fa167c not found: ID does not exist" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.562899 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g65x8\" (UniqueName: \"kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8\") pod \"de596f2b-465e-4b52-9b25-c41022b105b1\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.564091 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data\") pod \"de596f2b-465e-4b52-9b25-c41022b105b1\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.564283 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle\") pod 
\"de596f2b-465e-4b52-9b25-c41022b105b1\" (UID: \"de596f2b-465e-4b52-9b25-c41022b105b1\") " Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.567849 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8" (OuterVolumeSpecName: "kube-api-access-g65x8") pod "de596f2b-465e-4b52-9b25-c41022b105b1" (UID: "de596f2b-465e-4b52-9b25-c41022b105b1"). InnerVolumeSpecName "kube-api-access-g65x8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.587762 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data" (OuterVolumeSpecName: "config-data") pod "de596f2b-465e-4b52-9b25-c41022b105b1" (UID: "de596f2b-465e-4b52-9b25-c41022b105b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.593402 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de596f2b-465e-4b52-9b25-c41022b105b1" (UID: "de596f2b-465e-4b52-9b25-c41022b105b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.668553 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g65x8\" (UniqueName: \"kubernetes.io/projected/de596f2b-465e-4b52-9b25-c41022b105b1-kube-api-access-g65x8\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.668867 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.668950 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de596f2b-465e-4b52-9b25-c41022b105b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.807572 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.816337 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.845594 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:15:20 crc kubenswrapper[4876]: E1215 07:15:20.846186 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de596f2b-465e-4b52-9b25-c41022b105b1" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.846210 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="de596f2b-465e-4b52-9b25-c41022b105b1" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.846451 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="de596f2b-465e-4b52-9b25-c41022b105b1" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.847036 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.855447 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.882391 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.882886 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.882886 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.985320 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.985825 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.985930 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.985999 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl2zf\" (UniqueName: \"kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:20 crc kubenswrapper[4876]: I1215 07:15:20.986058 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.086991 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.087058 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.087084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.087159 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.087224 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl2zf\" (UniqueName: \"kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.091132 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.092047 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.092711 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.093536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.104049 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl2zf\" (UniqueName: \"kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf\") pod \"nova-cell1-novncproxy-0\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.209557 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:21 crc kubenswrapper[4876]: W1215 07:15:21.627095 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba4acd07_a5cb_47a7_9f1f_0bc818d3f738.slice/crio-4f714935ef68bdd21888a723081d129d8c2a2d7d72387377eabd8eda80890662 WatchSource:0}: Error finding container 4f714935ef68bdd21888a723081d129d8c2a2d7d72387377eabd8eda80890662: Status 404 returned error can't find the container with id 4f714935ef68bdd21888a723081d129d8c2a2d7d72387377eabd8eda80890662 Dec 15 07:15:21 crc kubenswrapper[4876]: I1215 07:15:21.635640 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:15:22 crc kubenswrapper[4876]: I1215 07:15:22.503663 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738","Type":"ContainerStarted","Data":"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1"} Dec 15 07:15:22 crc kubenswrapper[4876]: I1215 07:15:22.503921 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738","Type":"ContainerStarted","Data":"4f714935ef68bdd21888a723081d129d8c2a2d7d72387377eabd8eda80890662"} Dec 15 07:15:22 crc kubenswrapper[4876]: I1215 07:15:22.526969 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.52694454 podStartE2EDuration="2.52694454s" podCreationTimestamp="2025-12-15 07:15:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:22.519352045 +0000 UTC m=+1448.090494966" watchObservedRunningTime="2025-12-15 07:15:22.52694454 +0000 UTC m=+1448.098087451" Dec 15 07:15:22 crc kubenswrapper[4876]: I1215 07:15:22.720529 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de596f2b-465e-4b52-9b25-c41022b105b1" path="/var/lib/kubelet/pods/de596f2b-465e-4b52-9b25-c41022b105b1/volumes" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.719764 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.720205 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.720594 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.720632 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.723216 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.724301 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.923494 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.926672 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:24 crc kubenswrapper[4876]: I1215 07:15:24.931977 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083317 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083393 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d78lc\" (UniqueName: \"kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083607 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083657 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083735 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.083819 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185029 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185087 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d78lc\" (UniqueName: \"kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185194 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185225 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185261 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.185873 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.186601 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.186600 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.186607 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.186663 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.187323 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.206047 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d78lc\" (UniqueName: 
\"kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc\") pod \"dnsmasq-dns-5ddd577785-dsmf2\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.258272 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:25 crc kubenswrapper[4876]: I1215 07:15:25.585655 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.210393 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.546371 4876 generic.go:334] "Generic (PLEG): container finished" podID="db2340b9-e33b-432a-a1e6-df022337da1c" containerID="e41e2134ca24c29bb330d58bfc1c524aefcc9b5b0be72e853c4d8698c1fef59b" exitCode=0 Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.546484 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" event={"ID":"db2340b9-e33b-432a-a1e6-df022337da1c","Type":"ContainerDied","Data":"e41e2134ca24c29bb330d58bfc1c524aefcc9b5b0be72e853c4d8698c1fef59b"} Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.546638 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" event={"ID":"db2340b9-e33b-432a-a1e6-df022337da1c","Type":"ContainerStarted","Data":"7e98a905c7b247ec8087d10468e46383c2341416520a7e49dd62daaf5dd9164b"} Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.820049 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.820369 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-central-agent" containerID="cri-o://d31aed56f79830fc32e7559cde95bf21945913848c9554d07c4af91ff662751c" gracePeriod=30 Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.820402 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="proxy-httpd" containerID="cri-o://285b6d623463654e74a0ee227ad1e333fcec467b278a9da3f54ccc9c7f8d59ce" gracePeriod=30 Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.820492 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="sg-core" containerID="cri-o://a9cd568082b0d01fcd9ac76f7b4607cb411796beff0e07e6290fe3d8607c05b9" gracePeriod=30 Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.820536 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-notification-agent" containerID="cri-o://5f7084336b1200a91fec207a506ecd053c4f05466ef7f3afe1bca3b5f6a0da3c" gracePeriod=30 Dec 15 07:15:26 crc kubenswrapper[4876]: I1215 07:15:26.839902 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.194:3000/\": EOF" Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.323040 4876 patch_prober.go:28] 
interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.323130 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.559764 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" event={"ID":"db2340b9-e33b-432a-a1e6-df022337da1c","Type":"ContainerStarted","Data":"ef7fbf45baeea2399584cf009bc9639d23e5763a916d5051a8fd664fd8dbafa0"} Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.559905 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563894 4876 generic.go:334] "Generic (PLEG): container finished" podID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerID="285b6d623463654e74a0ee227ad1e333fcec467b278a9da3f54ccc9c7f8d59ce" exitCode=0 Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563928 4876 generic.go:334] "Generic (PLEG): container finished" podID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerID="a9cd568082b0d01fcd9ac76f7b4607cb411796beff0e07e6290fe3d8607c05b9" exitCode=2 Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563938 4876 generic.go:334] "Generic (PLEG): container finished" podID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerID="d31aed56f79830fc32e7559cde95bf21945913848c9554d07c4af91ff662751c" exitCode=0 Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563960 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerDied","Data":"285b6d623463654e74a0ee227ad1e333fcec467b278a9da3f54ccc9c7f8d59ce"} Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563984 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerDied","Data":"a9cd568082b0d01fcd9ac76f7b4607cb411796beff0e07e6290fe3d8607c05b9"} Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.563997 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerDied","Data":"d31aed56f79830fc32e7559cde95bf21945913848c9554d07c4af91ff662751c"} Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.581800 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" podStartSLOduration=3.581778841 podStartE2EDuration="3.581778841s" podCreationTimestamp="2025-12-15 07:15:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:27.580678322 +0000 UTC m=+1453.151821243" watchObservedRunningTime="2025-12-15 07:15:27.581778841 +0000 UTC m=+1453.152921762" Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.814950 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:27 crc 
kubenswrapper[4876]: I1215 07:15:27.815407 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-api" containerID="cri-o://55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3" gracePeriod=30 Dec 15 07:15:27 crc kubenswrapper[4876]: I1215 07:15:27.815728 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-log" containerID="cri-o://088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97" gracePeriod=30 Dec 15 07:15:28 crc kubenswrapper[4876]: I1215 07:15:28.575955 4876 generic.go:334] "Generic (PLEG): container finished" podID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerID="088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97" exitCode=143 Dec 15 07:15:28 crc kubenswrapper[4876]: I1215 07:15:28.576044 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerDied","Data":"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97"} Dec 15 07:15:28 crc kubenswrapper[4876]: I1215 07:15:28.891879 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:28 crc kubenswrapper[4876]: I1215 07:15:28.905153 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:28 crc kubenswrapper[4876]: I1215 07:15:28.905268 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.076236 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzmlw\" (UniqueName: \"kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.076536 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.076756 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.179084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzmlw\" (UniqueName: \"kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.179230 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.179348 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.180007 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.180032 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.202801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzmlw\" (UniqueName: \"kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw\") pod \"redhat-operators-mvc4z\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.265965 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:29 crc kubenswrapper[4876]: I1215 07:15:29.875762 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.621194 4876 generic.go:334] "Generic (PLEG): container finished" podID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerID="5f7084336b1200a91fec207a506ecd053c4f05466ef7f3afe1bca3b5f6a0da3c" exitCode=0 Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.621386 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerDied","Data":"5f7084336b1200a91fec207a506ecd053c4f05466ef7f3afe1bca3b5f6a0da3c"} Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.624583 4876 generic.go:334] "Generic (PLEG): container finished" podID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerID="11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654" exitCode=0 Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.624620 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerDied","Data":"11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654"} Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.624645 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerStarted","Data":"900274809a97bc379319844816951a3e816f3f0665cceb9fb23313f873e51845"} Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.780806 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.930380 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.930914 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931057 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931092 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz9b8\" (UniqueName: \"kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931160 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931268 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931332 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931418 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts\") pod \"abfee997-3c99-4413-b345-2fbcd1db2f93\" (UID: \"abfee997-3c99-4413-b345-2fbcd1db2f93\") " Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931530 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931973 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.931993 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.950287 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8" (OuterVolumeSpecName: "kube-api-access-mz9b8") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "kube-api-access-mz9b8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.950278 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts" (OuterVolumeSpecName: "scripts") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:30 crc kubenswrapper[4876]: I1215 07:15:30.976820 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.022005 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033397 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033652 4876 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033675 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033685 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033694 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033703 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz9b8\" (UniqueName: \"kubernetes.io/projected/abfee997-3c99-4413-b345-2fbcd1db2f93-kube-api-access-mz9b8\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.033716 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/abfee997-3c99-4413-b345-2fbcd1db2f93-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.073365 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data" (OuterVolumeSpecName: "config-data") pod "abfee997-3c99-4413-b345-2fbcd1db2f93" (UID: "abfee997-3c99-4413-b345-2fbcd1db2f93"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.136344 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/abfee997-3c99-4413-b345-2fbcd1db2f93-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.210608 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.231701 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.328919 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.442721 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs\") pod \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.443213 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs" (OuterVolumeSpecName: "logs") pod "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" (UID: "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb"). 
InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.443283 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data\") pod \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.443453 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle\") pod \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.443911 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlfzw\" (UniqueName: \"kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw\") pod \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\" (UID: \"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb\") " Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.444996 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.455272 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw" (OuterVolumeSpecName: "kube-api-access-xlfzw") pod "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" (UID: "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb"). InnerVolumeSpecName "kube-api-access-xlfzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.473309 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data" (OuterVolumeSpecName: "config-data") pod "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" (UID: "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.475244 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" (UID: "6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.546440 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.546486 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.546500 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlfzw\" (UniqueName: \"kubernetes.io/projected/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb-kube-api-access-xlfzw\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.638628 4876 generic.go:334] "Generic (PLEG): container finished" podID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerID="55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3" exitCode=0 Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.638735 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerDied","Data":"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3"} Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.638774 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb","Type":"ContainerDied","Data":"bf661437a0d06040629e633a84d17297c214d1911d0e77e5168b10145cf66da9"} Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.638795 4876 scope.go:117] "RemoveContainer" containerID="55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.638960 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.648608 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.648610 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"abfee997-3c99-4413-b345-2fbcd1db2f93","Type":"ContainerDied","Data":"2d1fc830e592b458ed439339b005961ec2107ef80653b70c2074983959480955"} Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.661128 4876 scope.go:117] "RemoveContainer" containerID="088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.685991 4876 scope.go:117] "RemoveContainer" containerID="55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.685999 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.695221 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3\": container with ID starting with 55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3 not found: ID does not exist" containerID="55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.695292 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3"} err="failed to get container status \"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3\": rpc error: code = NotFound desc = could not find container \"55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3\": container with ID starting with 55893db7ea2b8d4d6638b53c890b129a7650bbf0e01257731cc67b3f90ab1bf3 not found: ID does not exist" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.695339 4876 scope.go:117] "RemoveContainer" containerID="088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.695922 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97\": container with ID starting with 088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97 not found: ID does not exist" containerID="088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.695954 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97"} err="failed to get container status \"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97\": rpc error: code = NotFound desc = could not find container \"088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97\": container with ID starting with 088a29ea2cc94c1a1df6498f462bc4c2b3e749504172edb36a1ee8a43a406a97 not found: ID does not exist" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.695978 4876 scope.go:117] "RemoveContainer" containerID="285b6d623463654e74a0ee227ad1e333fcec467b278a9da3f54ccc9c7f8d59ce" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.700616 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 
07:15:31.723297 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.744788 4876 scope.go:117] "RemoveContainer" containerID="a9cd568082b0d01fcd9ac76f7b4607cb411796beff0e07e6290fe3d8607c05b9" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.748030 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.772766 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.784996 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785651 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-log" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785678 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-log" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785694 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="proxy-httpd" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785704 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="proxy-httpd" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785717 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-api" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785730 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-api" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785745 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-central-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785751 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-central-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785767 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="sg-core" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785773 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="sg-core" Dec 15 07:15:31 crc kubenswrapper[4876]: E1215 07:15:31.785808 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-notification-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.785814 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-notification-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.786019 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="proxy-httpd" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.786029 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="sg-core" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 
07:15:31.786038 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-log" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.786047 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-notification-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.786058 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" containerName="ceilometer-central-agent" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.786069 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" containerName="nova-api-api" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.788022 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.790444 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.790650 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.791507 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.804143 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.805305 4876 scope.go:117] "RemoveContainer" containerID="5f7084336b1200a91fec207a506ecd053c4f05466ef7f3afe1bca3b5f6a0da3c" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.821394 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.823532 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.826584 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.826949 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.827631 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.841658 4876 scope.go:117] "RemoveContainer" containerID="d31aed56f79830fc32e7559cde95bf21945913848c9554d07c4af91ff662751c" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.860296 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.952914 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953356 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953406 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953440 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkgcp\" (UniqueName: \"kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953480 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953563 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953714 
4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953738 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953804 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.953983 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.954010 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ssng\" (UniqueName: \"kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.954067 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.960584 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-9tmcs"] Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.963031 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.968744 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.970389 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 15 07:15:31 crc kubenswrapper[4876]: I1215 07:15:31.977407 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9tmcs"] Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055287 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055332 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055373 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055403 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkgcp\" (UniqueName: \"kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055463 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055494 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055513 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055533 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055549 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055568 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055593 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mt6p\" (UniqueName: \"kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055641 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ssng\" (UniqueName: \"kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055712 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.055730 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: 
\"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.056273 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.056557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.057403 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.060221 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.060346 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.060359 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.061349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.064202 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.065481 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.066010 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc 
kubenswrapper[4876]: I1215 07:15:32.069234 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.069830 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.074558 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkgcp\" (UniqueName: \"kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp\") pod \"nova-api-0\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.078546 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ssng\" (UniqueName: \"kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng\") pod \"ceilometer-0\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.117024 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.146332 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.162201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.162338 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.162417 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mt6p\" (UniqueName: \"kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.162464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.168661 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts\") pod \"nova-cell1-cell-mapping-9tmcs\" 
(UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.170277 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.170695 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.181121 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mt6p\" (UniqueName: \"kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p\") pod \"nova-cell1-cell-mapping-9tmcs\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.283069 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.654809 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.673992 4876 generic.go:334] "Generic (PLEG): container finished" podID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerID="0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0" exitCode=0 Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.674075 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerDied","Data":"0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0"} Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.728612 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb" path="/var/lib/kubelet/pods/6b4ddac3-9a40-4d3c-8e40-3757e43c4ceb/volumes" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.732812 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abfee997-3c99-4413-b345-2fbcd1db2f93" path="/var/lib/kubelet/pods/abfee997-3c99-4413-b345-2fbcd1db2f93/volumes" Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.760255 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:32 crc kubenswrapper[4876]: W1215 07:15:32.762623 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc469baf_8fc6_45d9_99f5_1079ebb0d435.slice/crio-1ec047acc92d017e12ca2ec1a1e566fca8efe3223a74dce9c03d521131ce9367 WatchSource:0}: Error finding container 1ec047acc92d017e12ca2ec1a1e566fca8efe3223a74dce9c03d521131ce9367: Status 404 returned error can't find the container with id 1ec047acc92d017e12ca2ec1a1e566fca8efe3223a74dce9c03d521131ce9367 Dec 15 07:15:32 crc kubenswrapper[4876]: I1215 07:15:32.877955 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-9tmcs"] Dec 15 07:15:33 crc kubenswrapper[4876]: 
I1215 07:15:33.684021 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerStarted","Data":"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.685557 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerStarted","Data":"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.685586 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerStarted","Data":"b1cabee56756691db02fdfd485f6fa75c9725c2dc24de542512d61abf72fe975"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.686863 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9tmcs" event={"ID":"7ef8833d-2cde-4dae-b9d2-f77f18726a29","Type":"ContainerStarted","Data":"1138bf311330adc926716e644beff1672cd4d4c45348c5a819155d3c4981d787"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.686894 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9tmcs" event={"ID":"7ef8833d-2cde-4dae-b9d2-f77f18726a29","Type":"ContainerStarted","Data":"86794be0744ac172b26083aebf73acdcfd194215ead54c6482f6595083a40670"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.688741 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerStarted","Data":"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.688774 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerStarted","Data":"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.688787 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerStarted","Data":"1ec047acc92d017e12ca2ec1a1e566fca8efe3223a74dce9c03d521131ce9367"} Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.701446 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mvc4z" podStartSLOduration=3.214525809 podStartE2EDuration="5.701428802s" podCreationTimestamp="2025-12-15 07:15:28 +0000 UTC" firstStartedPulling="2025-12-15 07:15:30.627260717 +0000 UTC m=+1456.198403628" lastFinishedPulling="2025-12-15 07:15:33.11416371 +0000 UTC m=+1458.685306621" observedRunningTime="2025-12-15 07:15:33.700809656 +0000 UTC m=+1459.271952567" watchObservedRunningTime="2025-12-15 07:15:33.701428802 +0000 UTC m=+1459.272571703" Dec 15 07:15:33 crc kubenswrapper[4876]: I1215 07:15:33.733592 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.733565185 podStartE2EDuration="2.733565185s" podCreationTimestamp="2025-12-15 07:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:33.724574714 +0000 UTC m=+1459.295717625" watchObservedRunningTime="2025-12-15 07:15:33.733565185 
+0000 UTC m=+1459.304708096" Dec 15 07:15:34 crc kubenswrapper[4876]: I1215 07:15:34.703560 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerStarted","Data":"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9"} Dec 15 07:15:34 crc kubenswrapper[4876]: I1215 07:15:34.739949 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-9tmcs" podStartSLOduration=3.739932124 podStartE2EDuration="3.739932124s" podCreationTimestamp="2025-12-15 07:15:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:33.747377206 +0000 UTC m=+1459.318520117" watchObservedRunningTime="2025-12-15 07:15:34.739932124 +0000 UTC m=+1460.311075035" Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.260540 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.330185 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.330546 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="dnsmasq-dns" containerID="cri-o://dfacf79125cbb126071908d81e1cab21e36699401feb61661cb7dae4abf7e102" gracePeriod=10 Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.563618 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.185:5353: connect: connection refused" Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.721127 4876 generic.go:334] "Generic (PLEG): container finished" podID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerID="dfacf79125cbb126071908d81e1cab21e36699401feb61661cb7dae4abf7e102" exitCode=0 Dec 15 07:15:35 crc kubenswrapper[4876]: I1215 07:15:35.721167 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" event={"ID":"e5cde5f5-9092-4cc3-9129-e303d0a4567a","Type":"ContainerDied","Data":"dfacf79125cbb126071908d81e1cab21e36699401feb61661cb7dae4abf7e102"} Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.733174 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.735651 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerStarted","Data":"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0"} Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.738471 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" event={"ID":"e5cde5f5-9092-4cc3-9129-e303d0a4567a","Type":"ContainerDied","Data":"75a931914f1eb45c52a2d53ecc5fdf2b65f2f0479d547dfe8c41be213aae451b"} Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.738527 4876 scope.go:117] "RemoveContainer" containerID="dfacf79125cbb126071908d81e1cab21e36699401feb61661cb7dae4abf7e102" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.738676 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-557bbc7df7-cgl7f" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.790517 4876 scope.go:117] "RemoveContainer" containerID="72506abee5e53bf8629d716ec2a45779f4d48bf8a3fb87fc54c11aad761fb1de" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.871604 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.872032 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brpjk\" (UniqueName: \"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.872125 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.872157 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.872199 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.872228 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc\") pod \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\" (UID: \"e5cde5f5-9092-4cc3-9129-e303d0a4567a\") " Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.907444 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk" (OuterVolumeSpecName: "kube-api-access-brpjk") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "kube-api-access-brpjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.942474 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.946550 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.948703 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.953505 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config" (OuterVolumeSpecName: "config") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.975324 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.975424 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brpjk\" (UniqueName: \"kubernetes.io/projected/e5cde5f5-9092-4cc3-9129-e303d0a4567a-kube-api-access-brpjk\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.975443 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.975456 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.975466 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:36 crc kubenswrapper[4876]: I1215 07:15:36.993052 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e5cde5f5-9092-4cc3-9129-e303d0a4567a" (UID: "e5cde5f5-9092-4cc3-9129-e303d0a4567a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.076678 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5cde5f5-9092-4cc3-9129-e303d0a4567a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.081286 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.092656 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-557bbc7df7-cgl7f"] Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.757099 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerStarted","Data":"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9"} Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.757572 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 07:15:37 crc kubenswrapper[4876]: I1215 07:15:37.786563 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.955994631 podStartE2EDuration="6.786535849s" podCreationTimestamp="2025-12-15 07:15:31 +0000 UTC" firstStartedPulling="2025-12-15 07:15:32.662255722 +0000 UTC m=+1458.233398623" lastFinishedPulling="2025-12-15 07:15:37.49279693 +0000 UTC m=+1463.063939841" observedRunningTime="2025-12-15 07:15:37.777119986 +0000 UTC m=+1463.348262897" watchObservedRunningTime="2025-12-15 07:15:37.786535849 +0000 UTC m=+1463.357678760" Dec 15 07:15:38 crc kubenswrapper[4876]: I1215 
07:15:38.720828 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" path="/var/lib/kubelet/pods/e5cde5f5-9092-4cc3-9129-e303d0a4567a/volumes" Dec 15 07:15:38 crc kubenswrapper[4876]: I1215 07:15:38.772535 4876 generic.go:334] "Generic (PLEG): container finished" podID="7ef8833d-2cde-4dae-b9d2-f77f18726a29" containerID="1138bf311330adc926716e644beff1672cd4d4c45348c5a819155d3c4981d787" exitCode=0 Dec 15 07:15:38 crc kubenswrapper[4876]: I1215 07:15:38.772624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9tmcs" event={"ID":"7ef8833d-2cde-4dae-b9d2-f77f18726a29","Type":"ContainerDied","Data":"1138bf311330adc926716e644beff1672cd4d4c45348c5a819155d3c4981d787"} Dec 15 07:15:39 crc kubenswrapper[4876]: I1215 07:15:39.266753 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:39 crc kubenswrapper[4876]: I1215 07:15:39.268135 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:39 crc kubenswrapper[4876]: I1215 07:15:39.320496 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:39 crc kubenswrapper[4876]: I1215 07:15:39.835457 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:39 crc kubenswrapper[4876]: I1215 07:15:39.879694 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.136825 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.233804 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts\") pod \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.234018 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle\") pod \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.234115 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data\") pod \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.234215 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mt6p\" (UniqueName: \"kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p\") pod \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\" (UID: \"7ef8833d-2cde-4dae-b9d2-f77f18726a29\") " Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.251216 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts" (OuterVolumeSpecName: "scripts") pod "7ef8833d-2cde-4dae-b9d2-f77f18726a29" (UID: "7ef8833d-2cde-4dae-b9d2-f77f18726a29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.254717 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p" (OuterVolumeSpecName: "kube-api-access-6mt6p") pod "7ef8833d-2cde-4dae-b9d2-f77f18726a29" (UID: "7ef8833d-2cde-4dae-b9d2-f77f18726a29"). InnerVolumeSpecName "kube-api-access-6mt6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.264951 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ef8833d-2cde-4dae-b9d2-f77f18726a29" (UID: "7ef8833d-2cde-4dae-b9d2-f77f18726a29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.275951 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data" (OuterVolumeSpecName: "config-data") pod "7ef8833d-2cde-4dae-b9d2-f77f18726a29" (UID: "7ef8833d-2cde-4dae-b9d2-f77f18726a29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.336018 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.336053 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.336068 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ef8833d-2cde-4dae-b9d2-f77f18726a29-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.336080 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mt6p\" (UniqueName: \"kubernetes.io/projected/7ef8833d-2cde-4dae-b9d2-f77f18726a29-kube-api-access-6mt6p\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.794615 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-9tmcs" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.794619 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-9tmcs" event={"ID":"7ef8833d-2cde-4dae-b9d2-f77f18726a29","Type":"ContainerDied","Data":"86794be0744ac172b26083aebf73acdcfd194215ead54c6482f6595083a40670"} Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.794988 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86794be0744ac172b26083aebf73acdcfd194215ead54c6482f6595083a40670" Dec 15 07:15:40 crc kubenswrapper[4876]: E1215 07:15:40.934212 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ef8833d_2cde_4dae_b9d2_f77f18726a29.slice/crio-86794be0744ac172b26083aebf73acdcfd194215ead54c6482f6595083a40670\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ef8833d_2cde_4dae_b9d2_f77f18726a29.slice\": RecentStats: unable to find data in memory cache]" Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.983853 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:40 crc kubenswrapper[4876]: I1215 07:15:40.984831 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" containerName="nova-scheduler-scheduler" containerID="cri-o://e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2" gracePeriod=30 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.002870 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.003150 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-log" containerID="cri-o://b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" gracePeriod=30 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.003292 4876 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-api-0" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-api" containerID="cri-o://80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" gracePeriod=30 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.025226 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.025518 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" containerID="cri-o://3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590" gracePeriod=30 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.025587 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" containerID="cri-o://1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e" gracePeriod=30 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.597008 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780571 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780766 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkgcp\" (UniqueName: \"kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780789 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780879 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780919 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.780956 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data\") pod \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\" (UID: \"dc469baf-8fc6-45d9-99f5-1079ebb0d435\") " Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.781295 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs" (OuterVolumeSpecName: "logs") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.783619 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc469baf-8fc6-45d9-99f5-1079ebb0d435-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.787448 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp" (OuterVolumeSpecName: "kube-api-access-pkgcp") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "kube-api-access-pkgcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804708 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerID="80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" exitCode=0 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804745 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerID="b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" exitCode=143 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804771 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804804 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerDied","Data":"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b"} Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804834 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerDied","Data":"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e"} Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804847 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"dc469baf-8fc6-45d9-99f5-1079ebb0d435","Type":"ContainerDied","Data":"1ec047acc92d017e12ca2ec1a1e566fca8efe3223a74dce9c03d521131ce9367"} Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.804864 4876 scope.go:117] "RemoveContainer" containerID="80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.807664 4876 generic.go:334] "Generic (PLEG): container finished" podID="50416e26-c694-47d8-a4af-109bf57f6f36" containerID="3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590" exitCode=143 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.807802 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerDied","Data":"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590"} Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.807885 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mvc4z" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="registry-server" 
containerID="cri-o://441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a" gracePeriod=2 Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.819304 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.820990 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data" (OuterVolumeSpecName: "config-data") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.842318 4876 scope.go:117] "RemoveContainer" containerID="b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.850841 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.857490 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dc469baf-8fc6-45d9-99f5-1079ebb0d435" (UID: "dc469baf-8fc6-45d9-99f5-1079ebb0d435"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.866724 4876 scope.go:117] "RemoveContainer" containerID="80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" Dec 15 07:15:41 crc kubenswrapper[4876]: E1215 07:15:41.870270 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b\": container with ID starting with 80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b not found: ID does not exist" containerID="80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.870307 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b"} err="failed to get container status \"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b\": rpc error: code = NotFound desc = could not find container \"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b\": container with ID starting with 80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b not found: ID does not exist" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.870331 4876 scope.go:117] "RemoveContainer" containerID="b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" Dec 15 07:15:41 crc kubenswrapper[4876]: E1215 07:15:41.872406 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e\": container with ID starting with b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e not found: ID does not exist" containerID="b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.872484 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e"} err="failed to get container status \"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e\": rpc error: code = NotFound desc = could not find container \"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e\": container with ID starting with b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e not found: ID does not exist" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.872538 4876 scope.go:117] "RemoveContainer" containerID="80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.873151 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b"} err="failed to get container status \"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b\": rpc error: code = NotFound desc = could not find container \"80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b\": container with ID starting with 80f203622e9279703e1789a9fc790db048230ac33dffac1b3114cfc791f3f45b not found: ID does not exist" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.873177 4876 scope.go:117] "RemoveContainer" containerID="b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.873513 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e"} err="failed to get container status \"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e\": rpc error: code = NotFound desc = could not find container \"b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e\": container with ID starting with b354825bb08b0550d25fe0a87316856cbc1fae18713302daa75ba2ba9b448f5e not found: ID does not exist" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.884586 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.884624 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.884634 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.884642 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkgcp\" (UniqueName: \"kubernetes.io/projected/dc469baf-8fc6-45d9-99f5-1079ebb0d435-kube-api-access-pkgcp\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:41 crc kubenswrapper[4876]: I1215 07:15:41.884651 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc469baf-8fc6-45d9-99f5-1079ebb0d435-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.207150 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.215930 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233214 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.233695 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-api" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233756 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-api" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.233772 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="init" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233780 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="init" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.233794 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ef8833d-2cde-4dae-b9d2-f77f18726a29" containerName="nova-manage" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233800 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ef8833d-2cde-4dae-b9d2-f77f18726a29" containerName="nova-manage" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.233833 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-log" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233840 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-log" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.233851 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="dnsmasq-dns" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.233856 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="dnsmasq-dns" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.234132 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5cde5f5-9092-4cc3-9129-e303d0a4567a" containerName="dnsmasq-dns" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.234155 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-api" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.234174 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" containerName="nova-api-log" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.234209 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ef8833d-2cde-4dae-b9d2-f77f18726a29" containerName="nova-manage" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.235720 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.241707 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.241942 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.242256 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.251878 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.339732 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.396677 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.396767 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.396820 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.396912 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq924\" (UniqueName: \"kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.396990 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.397046 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.483162 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500397 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data\") pod \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500487 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities\") pod \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500537 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhxjn\" (UniqueName: \"kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn\") pod \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500569 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle\") pod \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\" (UID: \"167c4c13-f766-4ecc-8d1c-0ba875acaf9b\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500661 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzmlw\" (UniqueName: \"kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw\") pod \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500688 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content\") pod \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\" (UID: \"2500c4d1-49f3-47c5-a3a4-1c00b4b29844\") " Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500913 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq924\" (UniqueName: \"kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.500967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.501000 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.501044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.501094 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.501225 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.507069 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.507193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.508033 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities" (OuterVolumeSpecName: "utilities") pod "2500c4d1-49f3-47c5-a3a4-1c00b4b29844" (UID: "2500c4d1-49f3-47c5-a3a4-1c00b4b29844"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.510029 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.514970 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn" (OuterVolumeSpecName: "kube-api-access-qhxjn") pod "167c4c13-f766-4ecc-8d1c-0ba875acaf9b" (UID: "167c4c13-f766-4ecc-8d1c-0ba875acaf9b"). InnerVolumeSpecName "kube-api-access-qhxjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.515028 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw" (OuterVolumeSpecName: "kube-api-access-xzmlw") pod "2500c4d1-49f3-47c5-a3a4-1c00b4b29844" (UID: "2500c4d1-49f3-47c5-a3a4-1c00b4b29844"). InnerVolumeSpecName "kube-api-access-xzmlw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.515522 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.522834 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.528447 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq924\" (UniqueName: \"kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924\") pod \"nova-api-0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.538940 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "167c4c13-f766-4ecc-8d1c-0ba875acaf9b" (UID: "167c4c13-f766-4ecc-8d1c-0ba875acaf9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.550343 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data" (OuterVolumeSpecName: "config-data") pod "167c4c13-f766-4ecc-8d1c-0ba875acaf9b" (UID: "167c4c13-f766-4ecc-8d1c-0ba875acaf9b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.605775 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.605802 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhxjn\" (UniqueName: \"kubernetes.io/projected/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-kube-api-access-qhxjn\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.605811 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.605819 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzmlw\" (UniqueName: \"kubernetes.io/projected/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-kube-api-access-xzmlw\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.605827 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/167c4c13-f766-4ecc-8d1c-0ba875acaf9b-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.657011 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2500c4d1-49f3-47c5-a3a4-1c00b4b29844" (UID: "2500c4d1-49f3-47c5-a3a4-1c00b4b29844"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.674139 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.707245 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2500c4d1-49f3-47c5-a3a4-1c00b4b29844-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.716867 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc469baf-8fc6-45d9-99f5-1079ebb0d435" path="/var/lib/kubelet/pods/dc469baf-8fc6-45d9-99f5-1079ebb0d435/volumes" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.828975 4876 generic.go:334] "Generic (PLEG): container finished" podID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerID="441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a" exitCode=0 Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.829402 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerDied","Data":"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a"} Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.829584 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mvc4z" event={"ID":"2500c4d1-49f3-47c5-a3a4-1c00b4b29844","Type":"ContainerDied","Data":"900274809a97bc379319844816951a3e816f3f0665cceb9fb23313f873e51845"} Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.829606 4876 scope.go:117] "RemoveContainer" containerID="441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.829467 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mvc4z" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.835735 4876 generic.go:334] "Generic (PLEG): container finished" podID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" containerID="e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2" exitCode=0 Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.835771 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"167c4c13-f766-4ecc-8d1c-0ba875acaf9b","Type":"ContainerDied","Data":"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2"} Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.835799 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"167c4c13-f766-4ecc-8d1c-0ba875acaf9b","Type":"ContainerDied","Data":"f36d96a36cb1f8bc59060dffadaf81f4f2a88aa6a7b134a327db8d75160d97c5"} Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.835852 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.866346 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.893375 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mvc4z"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.904518 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.919777 4876 scope.go:117] "RemoveContainer" containerID="0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.933082 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.948311 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.948785 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" containerName="nova-scheduler-scheduler" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.948810 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" containerName="nova-scheduler-scheduler" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.948842 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="extract-content" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.948852 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="extract-content" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.948866 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="registry-server" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.948873 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="registry-server" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.948899 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="extract-utilities" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.948908 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="extract-utilities" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.949194 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" containerName="nova-scheduler-scheduler" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.949220 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" containerName="registry-server" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.949865 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.952266 4876 scope.go:117] "RemoveContainer" containerID="11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.952580 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.972216 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.980010 4876 scope.go:117] "RemoveContainer" containerID="441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.980584 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a\": container with ID starting with 441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a not found: ID does not exist" containerID="441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.980635 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a"} err="failed to get container status \"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a\": rpc error: code = NotFound desc = could not find container \"441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a\": container with ID starting with 441ea984a698802d2b5e6334a3013ce6f81c86d97df4239c2ade1d92f3cf7c2a not found: ID does not exist" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.980669 4876 scope.go:117] "RemoveContainer" containerID="0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.981017 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0\": container with ID starting with 0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0 not found: ID does not exist" containerID="0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.981048 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0"} err="failed to get container status \"0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0\": rpc error: code = NotFound desc = could not find container \"0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0\": container with ID starting with 0929d156cc6f03c18f2fc87a4808915ca61167d652aac41d39035902d9dc6fe0 not found: ID does not exist" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.981063 4876 scope.go:117] "RemoveContainer" containerID="11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654" Dec 15 07:15:42 crc kubenswrapper[4876]: E1215 07:15:42.981406 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654\": container with ID starting with 
11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654 not found: ID does not exist" containerID="11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.981439 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654"} err="failed to get container status \"11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654\": rpc error: code = NotFound desc = could not find container \"11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654\": container with ID starting with 11138b573cf1b09ae5ed5067c40f526b3db9424c58ac9811e73ac09abd813654 not found: ID does not exist" Dec 15 07:15:42 crc kubenswrapper[4876]: I1215 07:15:42.981459 4876 scope.go:117] "RemoveContainer" containerID="e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.007760 4876 scope.go:117] "RemoveContainer" containerID="e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2" Dec 15 07:15:43 crc kubenswrapper[4876]: E1215 07:15:43.008981 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2\": container with ID starting with e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2 not found: ID does not exist" containerID="e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.009030 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2"} err="failed to get container status \"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2\": rpc error: code = NotFound desc = could not find container \"e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2\": container with ID starting with e7aa9fbb04f633b66fa13357a9a0f4d74482f4cc58748301219a69d3e0ac7cd2 not found: ID does not exist" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.119736 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k4ww\" (UniqueName: \"kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.119984 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.120176 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.160720 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:15:43 crc kubenswrapper[4876]: W1215 07:15:43.160770 
4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc226c84_f3f6_47dc_ba09_3a79dd00e5d0.slice/crio-754597aeae041d754951a2abf4d6bfa1c686422b63743e07a650429860fd06d5 WatchSource:0}: Error finding container 754597aeae041d754951a2abf4d6bfa1c686422b63743e07a650429860fd06d5: Status 404 returned error can't find the container with id 754597aeae041d754951a2abf4d6bfa1c686422b63743e07a650429860fd06d5 Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.222397 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.222600 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k4ww\" (UniqueName: \"kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.222683 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.228422 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.230512 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.239508 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k4ww\" (UniqueName: \"kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww\") pod \"nova-scheduler-0\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.278451 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.794791 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.850376 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerStarted","Data":"c47bf08f93b6c00bfb83bcd8d9986cba9e1dc9ea271a02a648096725810385b2"} Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.850467 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerStarted","Data":"673940f40218b378a3a021cc4f2020bd1bf2de102f6afab63a132d17f98cd088"} Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.850491 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerStarted","Data":"754597aeae041d754951a2abf4d6bfa1c686422b63743e07a650429860fd06d5"} Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.855694 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7","Type":"ContainerStarted","Data":"4742792aa81c700479e739cf48000abc06720885c34fa9b25570ba7d3c44d69d"} Dec 15 07:15:43 crc kubenswrapper[4876]: I1215 07:15:43.880390 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.880373666 podStartE2EDuration="1.880373666s" podCreationTimestamp="2025-12-15 07:15:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:43.878743542 +0000 UTC m=+1469.449886453" watchObservedRunningTime="2025-12-15 07:15:43.880373666 +0000 UTC m=+1469.451516577" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.167889 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:60688->10.217.0.189:8775: read: connection reset by peer" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.167914 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:60696->10.217.0.189:8775: read: connection reset by peer" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.671006 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.738018 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="167c4c13-f766-4ecc-8d1c-0ba875acaf9b" path="/var/lib/kubelet/pods/167c4c13-f766-4ecc-8d1c-0ba875acaf9b/volumes" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.738741 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2500c4d1-49f3-47c5-a3a4-1c00b4b29844" path="/var/lib/kubelet/pods/2500c4d1-49f3-47c5-a3a4-1c00b4b29844/volumes" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.751834 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs\") pod \"50416e26-c694-47d8-a4af-109bf57f6f36\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.751910 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle\") pod \"50416e26-c694-47d8-a4af-109bf57f6f36\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.752038 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs\") pod \"50416e26-c694-47d8-a4af-109bf57f6f36\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.752064 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data\") pod \"50416e26-c694-47d8-a4af-109bf57f6f36\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.752136 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqg5c\" (UniqueName: \"kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c\") pod \"50416e26-c694-47d8-a4af-109bf57f6f36\" (UID: \"50416e26-c694-47d8-a4af-109bf57f6f36\") " Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.761791 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs" (OuterVolumeSpecName: "logs") pod "50416e26-c694-47d8-a4af-109bf57f6f36" (UID: "50416e26-c694-47d8-a4af-109bf57f6f36"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.778076 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c" (OuterVolumeSpecName: "kube-api-access-tqg5c") pod "50416e26-c694-47d8-a4af-109bf57f6f36" (UID: "50416e26-c694-47d8-a4af-109bf57f6f36"). InnerVolumeSpecName "kube-api-access-tqg5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.792198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50416e26-c694-47d8-a4af-109bf57f6f36" (UID: "50416e26-c694-47d8-a4af-109bf57f6f36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.821388 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "50416e26-c694-47d8-a4af-109bf57f6f36" (UID: "50416e26-c694-47d8-a4af-109bf57f6f36"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.830743 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data" (OuterVolumeSpecName: "config-data") pod "50416e26-c694-47d8-a4af-109bf57f6f36" (UID: "50416e26-c694-47d8-a4af-109bf57f6f36"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.854258 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50416e26-c694-47d8-a4af-109bf57f6f36-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.854296 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.854308 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqg5c\" (UniqueName: \"kubernetes.io/projected/50416e26-c694-47d8-a4af-109bf57f6f36-kube-api-access-tqg5c\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.854320 4876 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.854331 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50416e26-c694-47d8-a4af-109bf57f6f36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.883369 4876 generic.go:334] "Generic (PLEG): container finished" podID="50416e26-c694-47d8-a4af-109bf57f6f36" containerID="1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e" exitCode=0 Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.883431 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerDied","Data":"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e"} Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.883459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"50416e26-c694-47d8-a4af-109bf57f6f36","Type":"ContainerDied","Data":"2c646bc4d65cf229e3d6d0a3b32d18229b4f2d6e9b02c602a03928aacf0ed9e8"} Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.883477 4876 scope.go:117] "RemoveContainer" containerID="1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.883571 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.893623 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7","Type":"ContainerStarted","Data":"529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f"} Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.913804 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.913602746 podStartE2EDuration="2.913602746s" podCreationTimestamp="2025-12-15 07:15:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:44.912353033 +0000 UTC m=+1470.483495964" watchObservedRunningTime="2025-12-15 07:15:44.913602746 +0000 UTC m=+1470.484745677" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.931956 4876 scope.go:117] "RemoveContainer" containerID="3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.954728 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.963960 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.974136 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:44 crc kubenswrapper[4876]: E1215 07:15:44.974638 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.974658 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" Dec 15 07:15:44 crc kubenswrapper[4876]: E1215 07:15:44.974700 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.974709 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.974931 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-log" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.974956 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" containerName="nova-metadata-metadata" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.976185 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.980037 4876 scope.go:117] "RemoveContainer" containerID="1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.980336 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.980478 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 07:15:44 crc kubenswrapper[4876]: E1215 07:15:44.987015 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e\": container with ID starting with 1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e not found: ID does not exist" containerID="1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.987056 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e"} err="failed to get container status \"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e\": rpc error: code = NotFound desc = could not find container \"1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e\": container with ID starting with 1065aee4318ea138f767ccefb6d9f8a37801b7a772f826a08b72a80e05134e6e not found: ID does not exist" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.987083 4876 scope.go:117] "RemoveContainer" containerID="3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.987657 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:44 crc kubenswrapper[4876]: E1215 07:15:44.990301 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590\": container with ID starting with 3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590 not found: ID does not exist" containerID="3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590" Dec 15 07:15:44 crc kubenswrapper[4876]: I1215 07:15:44.990341 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590"} err="failed to get container status \"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590\": rpc error: code = NotFound desc = could not find container \"3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590\": container with ID starting with 3e716e776480342357cbb9eee0d40591a0d30b528a30a772cfc943dfca777590 not found: ID does not exist" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.057092 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmx8z\" (UniqueName: \"kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.057151 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.057176 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.057470 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.057636 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.160310 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.160406 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.160495 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmx8z\" (UniqueName: \"kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.160525 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.160553 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.168394 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " 
pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.169805 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.169884 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.173099 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.190178 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmx8z\" (UniqueName: \"kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z\") pod \"nova-metadata-0\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.307745 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.799571 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:15:45 crc kubenswrapper[4876]: W1215 07:15:45.800071 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83083084_55fd_4e95_87bf_bebcc2d41fb8.slice/crio-9f94543edadc892f0dac7a43a7361d73c20fc0fd0776d3e250d7e03db91feefe WatchSource:0}: Error finding container 9f94543edadc892f0dac7a43a7361d73c20fc0fd0776d3e250d7e03db91feefe: Status 404 returned error can't find the container with id 9f94543edadc892f0dac7a43a7361d73c20fc0fd0776d3e250d7e03db91feefe Dec 15 07:15:45 crc kubenswrapper[4876]: I1215 07:15:45.907955 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerStarted","Data":"9f94543edadc892f0dac7a43a7361d73c20fc0fd0776d3e250d7e03db91feefe"} Dec 15 07:15:46 crc kubenswrapper[4876]: I1215 07:15:46.719861 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50416e26-c694-47d8-a4af-109bf57f6f36" path="/var/lib/kubelet/pods/50416e26-c694-47d8-a4af-109bf57f6f36/volumes" Dec 15 07:15:46 crc kubenswrapper[4876]: I1215 07:15:46.922807 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerStarted","Data":"cbf0f151002b45d509e231e069399143add0e99bc811fdc2edeef06787b39699"} Dec 15 07:15:46 crc kubenswrapper[4876]: I1215 07:15:46.922882 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerStarted","Data":"4079f6a6d32eb7733edb32cf1b6f998b0b66e4e5adb9fa96b6a490f1de01b01b"} Dec 15 07:15:46 crc kubenswrapper[4876]: I1215 
07:15:46.960523 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.960497201 podStartE2EDuration="2.960497201s" podCreationTimestamp="2025-12-15 07:15:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:15:46.947456321 +0000 UTC m=+1472.518599243" watchObservedRunningTime="2025-12-15 07:15:46.960497201 +0000 UTC m=+1472.531640122" Dec 15 07:15:48 crc kubenswrapper[4876]: I1215 07:15:48.279116 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 07:15:50 crc kubenswrapper[4876]: I1215 07:15:50.308950 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:15:50 crc kubenswrapper[4876]: I1215 07:15:50.309382 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 07:15:52 crc kubenswrapper[4876]: I1215 07:15:52.675974 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:15:52 crc kubenswrapper[4876]: I1215 07:15:52.676466 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 07:15:53 crc kubenswrapper[4876]: I1215 07:15:53.279553 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 07:15:53 crc kubenswrapper[4876]: I1215 07:15:53.325450 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 07:15:53 crc kubenswrapper[4876]: I1215 07:15:53.691286 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:53 crc kubenswrapper[4876]: I1215 07:15:53.691332 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:54 crc kubenswrapper[4876]: I1215 07:15:54.023143 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 07:15:55 crc kubenswrapper[4876]: I1215 07:15:55.308909 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 07:15:55 crc kubenswrapper[4876]: I1215 07:15:55.309228 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 07:15:56 crc kubenswrapper[4876]: I1215 07:15:56.322456 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:56 crc kubenswrapper[4876]: I1215 07:15:56.322491 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" probeResult="failure" 
output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 15 07:15:57 crc kubenswrapper[4876]: I1215 07:15:57.322800 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:15:57 crc kubenswrapper[4876]: I1215 07:15:57.323229 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:16:02 crc kubenswrapper[4876]: I1215 07:16:02.129210 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 15 07:16:02 crc kubenswrapper[4876]: I1215 07:16:02.685926 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 07:16:02 crc kubenswrapper[4876]: I1215 07:16:02.686965 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 07:16:02 crc kubenswrapper[4876]: I1215 07:16:02.687737 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 07:16:02 crc kubenswrapper[4876]: I1215 07:16:02.698053 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 07:16:03 crc kubenswrapper[4876]: I1215 07:16:03.142559 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 07:16:03 crc kubenswrapper[4876]: I1215 07:16:03.148759 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 07:16:05 crc kubenswrapper[4876]: I1215 07:16:05.314953 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 07:16:05 crc kubenswrapper[4876]: I1215 07:16:05.316357 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 07:16:05 crc kubenswrapper[4876]: I1215 07:16:05.321777 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 07:16:06 crc kubenswrapper[4876]: I1215 07:16:06.172297 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.194908 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.195501 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="e3ade031-559a-4a09-b707-ab081a659fc9" containerName="openstackclient" containerID="cri-o://5984af4749e3e3b071be6cbab85aa2dc4f95eb610ca9d34223b709f90b67f795" gracePeriod=2 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.225778 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.445601 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.491655 
4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance2c9d-account-delete-jqhxv"] Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.492339 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3ade031-559a-4a09-b707-ab081a659fc9" containerName="openstackclient" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.492363 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3ade031-559a-4a09-b707-ab081a659fc9" containerName="openstackclient" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.492546 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3ade031-559a-4a09-b707-ab081a659fc9" containerName="openstackclient" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.498247 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.520077 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.520457 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="openstack-network-exporter" containerID="cri-o://a317e6932e2fdc31885ac1215fbae30ebcc70d8d5b074c22c0c2b75c7095ebd6" gracePeriod=300 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.539422 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance2c9d-account-delete-jqhxv"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.549268 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frhbb\" (UniqueName: \"kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.549374 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.549531 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.549582 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data podName:db3f4964-0cca-4527-93de-457292de4be7 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:25.049565637 +0000 UTC m=+1510.620708548 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data") pod "rabbitmq-server-0" (UID: "db3f4964-0cca-4527-93de-457292de4be7") : configmap "rabbitmq-config-data" not found Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.575387 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.575623 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" containerID="cri-o://8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" gracePeriod=30 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.575742 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="openstack-network-exporter" containerID="cri-o://c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626" gracePeriod=30 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.629480 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement4aed-account-delete-jwsnh"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.630635 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.650593 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frhbb\" (UniqueName: \"kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.650666 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86xkg\" (UniqueName: \"kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.650696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.650713 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.651438 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc 
kubenswrapper[4876]: I1215 07:16:24.690753 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frhbb\" (UniqueName: \"kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb\") pod \"glance2c9d-account-delete-jqhxv\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.695304 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.695887 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="openstack-network-exporter" containerID="cri-o://84311730edcfcde730c08a397de2aee0f27365e627e2d75a586ebc62a3484c83" gracePeriod=300 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.722212 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="ovsdbserver-sb" containerID="cri-o://10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" gracePeriod=300 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.748783 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-j6772"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.748818 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-qnzgp"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.755689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.756011 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86xkg\" (UniqueName: \"kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.757370 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.761661 4876 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.761713 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:25.261698992 +0000 UTC m=+1510.832841903 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-scripts" not found Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.762020 4876 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.762094 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:25.262082393 +0000 UTC m=+1510.833225314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-config-data" not found Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.764763 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-j6772"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.793141 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-qnzgp"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.821472 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.867192 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement4aed-account-delete-jwsnh"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.884574 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86xkg\" (UniqueName: \"kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg\") pod \"placement4aed-account-delete-jwsnh\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.890195 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinderfc60-account-delete-vg9nk"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.891373 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.901121 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.901319 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-knn2n" podUID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" containerName="openstack-network-exporter" containerID="cri-o://69805d5a06cd5524fafd7005c790931dcf3f61348fabbc5ef0cc66ad4bdaf487" gracePeriod=30 Dec 15 07:16:24 crc kubenswrapper[4876]: E1215 07:16:24.927185 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.927456 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.980427 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="ovsdbserver-nb" containerID="cri-o://2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" gracePeriod=300 Dec 15 07:16:24 crc kubenswrapper[4876]: I1215 07:16:24.996627 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderfc60-account-delete-vg9nk"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.003585 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.003676 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dv2d\" (UniqueName: \"kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.021353 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.069689 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.069777 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack/ovsdbserver-sb-0" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="ovsdbserver-sb" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.107871 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.107920 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dv2d\" (UniqueName: \"kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.108237 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.108274 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data podName:db3f4964-0cca-4527-93de-457292de4be7 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:26.108262636 +0000 UTC m=+1511.679405537 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data") pod "rabbitmq-server-0" (UID: "db3f4964-0cca-4527-93de-457292de4be7") : configmap "rabbitmq-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.110424 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.115818 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.120797 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.135559 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dv2d\" (UniqueName: \"kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d\") pod \"cinderfc60-account-delete-vg9nk\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.141688 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.150019 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.197294 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.211073 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.211592 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvd7q\" (UniqueName: \"kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.212553 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.212595 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data podName:2a1020a1-7afe-46ee-b5c4-40a9290a05e1 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:25.712581061 +0000 UTC m=+1511.283723972 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data") pod "rabbitmq-cell1-server-0" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1") : configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.257579 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.306314 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.313557 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-mx7zg"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.316040 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvd7q\" (UniqueName: \"kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.316181 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.316715 4876 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.316777 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:26.316760103 +0000 UTC m=+1511.887903014 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-scripts" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.317351 4876 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.317388 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:26.31737824 +0000 UTC m=+1511.888521151 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.317688 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.339657 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.340870 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.358764 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvd7q\" (UniqueName: \"kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q\") pod \"barbican2a14-account-delete-mbwx7\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.427722 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb254\" (UniqueName: \"kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.427829 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.442634 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-mx7zg"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.455525 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-s6h7g"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.501022 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-s6h7g"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.593319 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.593625 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb254\" (UniqueName: \"kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.597508 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.640138 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.706624 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.723879 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-knn2n_87fa47d9-4ce3-4233-baf3-a5b86ab26dd1/openstack-network-exporter/0.log" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.724312 4876 generic.go:334] "Generic (PLEG): container finished" podID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" containerID="69805d5a06cd5524fafd7005c790931dcf3f61348fabbc5ef0cc66ad4bdaf487" exitCode=2 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.724550 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-knn2n" event={"ID":"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1","Type":"ContainerDied","Data":"69805d5a06cd5524fafd7005c790931dcf3f61348fabbc5ef0cc66ad4bdaf487"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.725654 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb254\" (UniqueName: \"kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254\") pod \"neutrona915-account-delete-gswz2\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.744068 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_189a34a7-d451-4c26-b84e-5b056fe1e93b/ovsdbserver-nb/0.log" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.744372 4876 generic.go:334] "Generic (PLEG): container finished" podID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerID="84311730edcfcde730c08a397de2aee0f27365e627e2d75a586ebc62a3484c83" exitCode=2 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.744400 4876 generic.go:334] "Generic (PLEG): container finished" podID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerID="2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" exitCode=143 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.744474 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerDied","Data":"84311730edcfcde730c08a397de2aee0f27365e627e2d75a586ebc62a3484c83"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.744500 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerDied","Data":"2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.747868 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.759932 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8fc3485b-9f78-40d0-b864-b40626fdba7c/ovsdbserver-sb/0.log" Dec 
15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.760013 4876 generic.go:334] "Generic (PLEG): container finished" podID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerID="a317e6932e2fdc31885ac1215fbae30ebcc70d8d5b074c22c0c2b75c7095ebd6" exitCode=2 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.760032 4876 generic.go:334] "Generic (PLEG): container finished" podID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerID="10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" exitCode=143 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.792716 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.792994 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerDied","Data":"a317e6932e2fdc31885ac1215fbae30ebcc70d8d5b074c22c0c2b75c7095ebd6"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.793020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerDied","Data":"10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.793095 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.805387 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: E1215 07:16:25.805431 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data podName:2a1020a1-7afe-46ee-b5c4-40a9290a05e1 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:26.805419612 +0000 UTC m=+1512.376562523 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data") pod "rabbitmq-cell1-server-0" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1") : configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.805714 4876 generic.go:334] "Generic (PLEG): container finished" podID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerID="c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626" exitCode=2 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.805740 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerDied","Data":"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626"} Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.808006 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.809173 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.831512 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.851605 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-nhpf5"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.865289 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-nhpf5"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.907088 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.907153 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twr74\" (UniqueName: \"kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.907244 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5k8w\" (UniqueName: \"kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.907283 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.924049 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.924283 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="dnsmasq-dns" containerID="cri-o://ef7fbf45baeea2399584cf009bc9639d23e5763a916d5051a8fd664fd8dbafa0" gracePeriod=10 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.947526 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.947847 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-64c85ddd54-vd84c" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-log" containerID="cri-o://08cefbd86f57e35feb0652dfa8a1d0ef70620766258370c430d6aa69ef015c8f" gracePeriod=30 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.948245 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-64c85ddd54-vd84c" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" 
containerName="placement-api" containerID="cri-o://cef34266913f1af42f423b9d794de57144b0ceaffd39d3e744bc8fb3a3cb3355" gracePeriod=30 Dec 15 07:16:25 crc kubenswrapper[4876]: I1215 07:16:25.984026 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-lld2z"] Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.005675 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.009875 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5k8w\" (UniqueName: \"kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.009947 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.010032 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.010054 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twr74\" (UniqueName: \"kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.017039 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-lld2z"] Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.017817 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.029586 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.067235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5k8w\" (UniqueName: \"kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w\") pod \"novaapic196-account-delete-j7dqh\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.083271 4876 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-9tmcs"] Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.083754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twr74\" (UniqueName: \"kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74\") pod \"novacell0d7a8-account-delete-265rv\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.084007 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:16:26 crc kubenswrapper[4876]: E1215 07:16:26.112870 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 15 07:16:26 crc kubenswrapper[4876]: E1215 07:16:26.112931 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data podName:db3f4964-0cca-4527-93de-457292de4be7 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:28.11291376 +0000 UTC m=+1513.684056671 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data") pod "rabbitmq-server-0" (UID: "db3f4964-0cca-4527-93de-457292de4be7") : configmap "rabbitmq-config-data" not found Dec 15 07:16:26 crc kubenswrapper[4876]: I1215 07:16:26.173137 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-9tmcs"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.225028 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.225288 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-log" containerID="cri-o://cc294b31b47b3b2be756a6a68ef1afe2c1758bb4597cea8ea401001afc30deaa" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.225689 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-httpd" containerID="cri-o://c5bf69c338d2955649defd7611ec504382e5c3542edee5ca3f56d01332885ae6" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.286410 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.286630 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="cinder-scheduler" containerID="cri-o://b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.287010 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="probe" containerID="cri-o://4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.317927 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.318248 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-log" containerID="cri-o://77739227b2e4df94b91265646c063d9a27732d0f675c7fd40830a4b54d3a0c86" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.318415 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-httpd" containerID="cri-o://1cb73a4bf7ed52b3476b312fe18978e8dfe39c5143fc358708eff5808b066bce" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.328046 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.329049 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-57dd59bc-td2ns" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-api" containerID="cri-o://281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.329205 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-57dd59bc-td2ns" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-httpd" containerID="cri-o://9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.340870 4876 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.340941 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:28.340925504 +0000 UTC m=+1513.912068415 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-scripts" not found Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.340996 4876 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.341024 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:28.341015807 +0000 UTC m=+1513.912158718 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-config-data" not found Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.363903 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.364307 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api-log" containerID="cri-o://13335ca52da2d503c57740b99d9daf374ffc5018a3326ec34ac01d5b52c7213e" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.364685 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api" containerID="cri-o://0adedfb29f59a78ed0ca95dedd5200ded03f6c4a9258dcf41996ef3271579c3d" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.385398 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-chs2q"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.438080 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-chs2q"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.502339 4876 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 15 07:16:27 crc kubenswrapper[4876]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 15 07:16:27 crc kubenswrapper[4876]: + source /usr/local/bin/container-scripts/functions Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNBridge=br-int Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNRemote=tcp:localhost:6642 Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNEncapType=geneve Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNAvailabilityZones= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ EnableChassisAsGateway=true Dec 15 07:16:27 crc kubenswrapper[4876]: ++ PhysicalNetworks= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNHostName= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 15 07:16:27 crc kubenswrapper[4876]: ++ ovs_dir=/var/lib/openvswitch Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 15 07:16:27 crc kubenswrapper[4876]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + cleanup_ovsdb_server_semaphore Dec 15 07:16:27 crc kubenswrapper[4876]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 15 07:16:27 crc kubenswrapper[4876]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-gbp2z" message=< Dec 15 07:16:27 crc kubenswrapper[4876]: Exiting ovsdb-server (5) [ OK ] Dec 15 07:16:27 crc kubenswrapper[4876]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 15 07:16:27 crc kubenswrapper[4876]: + source /usr/local/bin/container-scripts/functions Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNBridge=br-int Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNRemote=tcp:localhost:6642 Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNEncapType=geneve Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNAvailabilityZones= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ EnableChassisAsGateway=true Dec 15 07:16:27 crc kubenswrapper[4876]: ++ PhysicalNetworks= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNHostName= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 15 07:16:27 crc kubenswrapper[4876]: ++ ovs_dir=/var/lib/openvswitch Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 15 07:16:27 crc kubenswrapper[4876]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + cleanup_ovsdb_server_semaphore Dec 15 07:16:27 crc kubenswrapper[4876]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 15 07:16:27 crc kubenswrapper[4876]: > Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.502382 4876 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 15 07:16:27 crc kubenswrapper[4876]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 15 07:16:27 crc kubenswrapper[4876]: + source /usr/local/bin/container-scripts/functions Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNBridge=br-int Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNRemote=tcp:localhost:6642 Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNEncapType=geneve Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNAvailabilityZones= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ EnableChassisAsGateway=true Dec 15 07:16:27 crc kubenswrapper[4876]: ++ PhysicalNetworks= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ OVNHostName= Dec 15 07:16:27 crc kubenswrapper[4876]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 15 07:16:27 crc kubenswrapper[4876]: ++ ovs_dir=/var/lib/openvswitch Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 15 07:16:27 crc kubenswrapper[4876]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 15 07:16:27 crc kubenswrapper[4876]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + sleep 0.5 Dec 15 07:16:27 crc kubenswrapper[4876]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 15 07:16:27 crc kubenswrapper[4876]: + cleanup_ovsdb_server_semaphore Dec 15 07:16:27 crc kubenswrapper[4876]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 15 07:16:27 crc kubenswrapper[4876]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 15 07:16:27 crc kubenswrapper[4876]: > pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" containerID="cri-o://63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.502413 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" containerID="cri-o://63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" gracePeriod=29 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.502681 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503070 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-server" containerID="cri-o://ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503362 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="swift-recon-cron" containerID="cri-o://38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503401 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="rsync" containerID="cri-o://5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503433 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-expirer" containerID="cri-o://531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503463 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-updater" containerID="cri-o://3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503494 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-auditor" containerID="cri-o://e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503522 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-replicator" containerID="cri-o://6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328" gracePeriod=30 Dec 15 07:16:27 crc 
kubenswrapper[4876]: I1215 07:16:26.503551 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-server" containerID="cri-o://f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503578 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-updater" containerID="cri-o://6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503606 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-auditor" containerID="cri-o://43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503632 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-replicator" containerID="cri-o://ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503662 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-server" containerID="cri-o://61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503688 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-reaper" containerID="cri-o://a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503715 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-auditor" containerID="cri-o://6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.503741 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-replicator" containerID="cri-o://b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.510981 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.525072 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.530191 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.530436 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" 
containerID="cri-o://4079f6a6d32eb7733edb32cf1b6f998b0b66e4e5adb9fa96b6a490f1de01b01b" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.530575 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" containerID="cri-o://cbf0f151002b45d509e231e069399143add0e99bc811fdc2edeef06787b39699" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.557414 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.557670 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7bffd5564f-rlt89" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker-log" containerID="cri-o://cd961a36fd04b1c02785e7dd50ada2e1ee1120caf0386cf15d358851e661646b" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.558143 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-7bffd5564f-rlt89" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker" containerID="cri-o://5d1b2d9f1bf51ee65493ed30ecd5e5bab31cd11e0866d010e9194c3abf215bce" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.585204 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.585436 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-log" containerID="cri-o://673940f40218b378a3a021cc4f2020bd1bf2de102f6afab63a132d17f98cd088" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.585805 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-api" containerID="cri-o://c47bf08f93b6c00bfb83bcd8d9986cba9e1dc9ea271a02a648096725810385b2" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.597337 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.597559 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8774998dd-ps8vx" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api-log" containerID="cri-o://4df3f9d2120bea7d34cfae05a4f5df18902f5f757c6e24aab3dce17f1b9c115c" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.597980 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8774998dd-ps8vx" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api" containerID="cri-o://ab2b7bcd77dc096c6f8bd4c255e23eb26198e47b55d43f5eb7d82a47546b3194" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.601537 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" containerID="cri-o://d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" gracePeriod=29 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.612200 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell1-3e2f-account-create-update-w9xkp"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.617705 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3e2f-account-create-update-w9xkp"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.635062 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-pkjmb"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.658249 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-pkjmb"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.658302 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.658504 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener-log" containerID="cri-o://409c22ba1b246c89eacddbba5e405955d8477034e987e355d0b9e54624d39f46" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.658870 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener" containerID="cri-o://906e12a4e7207c08cbd8d84deaf12b6e44402bd7fad151f831fb9bbc8d378ea9" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.664601 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.664808 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.882357 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.882703 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data podName:2a1020a1-7afe-46ee-b5c4-40a9290a05e1 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:28.882688556 +0000 UTC m=+1514.453831467 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data") pod "rabbitmq-cell1-server-0" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1") : configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.889644 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721 is running failed: container process not found" containerID="2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.892552 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721 is running failed: container process not found" containerID="2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.896457 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721 is running failed: container process not found" containerID="2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:26.896506 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="ovsdbserver-nb" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.900125 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_189a34a7-d451-4c26-b84e-5b056fe1e93b/ovsdbserver-nb/0.log" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.911593 4876 generic.go:334] "Generic (PLEG): container finished" podID="23569e4d-9d69-4947-9293-50d1667c1eda" containerID="08cefbd86f57e35feb0652dfa8a1d0ef70620766258370c430d6aa69ef015c8f" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.940270 4876 generic.go:334] "Generic (PLEG): container finished" podID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerID="4df3f9d2120bea7d34cfae05a4f5df18902f5f757c6e24aab3dce17f1b9c115c" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.957289 4876 generic.go:334] "Generic (PLEG): container finished" podID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerID="cd961a36fd04b1c02785e7dd50ada2e1ee1120caf0386cf15d358851e661646b" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.969989 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.977058 4876 generic.go:334] "Generic (PLEG): container finished" podID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerID="cc294b31b47b3b2be756a6a68ef1afe2c1758bb4597cea8ea401001afc30deaa" exitCode=143 
Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.980786 4876 generic.go:334] "Generic (PLEG): container finished" podID="db2340b9-e33b-432a-a1e6-df022337da1c" containerID="ef7fbf45baeea2399584cf009bc9639d23e5763a916d5051a8fd664fd8dbafa0" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:26.985067 4876 generic.go:334] "Generic (PLEG): container finished" podID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerID="13335ca52da2d503c57740b99d9daf374ffc5018a3326ec34ac01d5b52c7213e" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.010846 4876 generic.go:334] "Generic (PLEG): container finished" podID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerID="77739227b2e4df94b91265646c063d9a27732d0f675c7fd40830a4b54d3a0c86" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.025950 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-knn2n_87fa47d9-4ce3-4233-baf3-a5b86ab26dd1/openstack-network-exporter/0.log" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.040223 4876 generic.go:334] "Generic (PLEG): container finished" podID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerID="4079f6a6d32eb7733edb32cf1b6f998b0b66e4e5adb9fa96b6a490f1de01b01b" exitCode=143 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150327 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150352 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150360 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150367 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150374 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150380 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.150386 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c" exitCode=0 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.159965 4876 generic.go:334] "Generic (PLEG): container finished" podID="e3ade031-559a-4a09-b707-ab081a659fc9" containerID="5984af4749e3e3b071be6cbab85aa2dc4f95eb610ca9d34223b709f90b67f795" exitCode=137 Dec 15 07:16:27 crc kubenswrapper[4876]: W1215 07:16:27.170209 4876 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3eb0262_ff07_4e0a_8e3b_9b147ccf7e71.slice/crio-bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67 WatchSource:0}: Error finding container bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67: Status 404 returned error can't find the container with id bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67 Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.170489 4876 log.go:32] "ExecSync cmd from runtime service failed" err=< Dec 15 07:16:27 crc kubenswrapper[4876]: rpc error: code = Unknown desc = command error: setns `mnt`: Bad file descriptor Dec 15 07:16:27 crc kubenswrapper[4876]: fail startup Dec 15 07:16:27 crc kubenswrapper[4876]: , stdout: , stderr: , exit code -1 Dec 15 07:16:27 crc kubenswrapper[4876]: > containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.174869 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" probeResult="failure" output=< Dec 15 07:16:27 crc kubenswrapper[4876]: cat: /var/run/openvswitch/ovs-vswitchd.pid: No such file or directory Dec 15 07:16:27 crc kubenswrapper[4876]: ERROR - Failed to get pid for ovs-vswitchd, exit status: 0 Dec 15 07:16:27 crc kubenswrapper[4876]: > Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.174986 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.222463 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.222497 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.231376 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2887b466-b7ab-45fe-9cf5-bff066201589" path="/var/lib/kubelet/pods/2887b466-b7ab-45fe-9cf5-bff066201589/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.232418 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46057e1c-873e-40f1-81c2-77c99f416cc7" path="/var/lib/kubelet/pods/46057e1c-873e-40f1-81c2-77c99f416cc7/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.233609 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="79c75431-f896-4bee-9649-3a94221ace73" path="/var/lib/kubelet/pods/79c75431-f896-4bee-9649-3a94221ace73/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.236943 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="rabbitmq" containerID="cri-o://0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42" gracePeriod=604800 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.237166 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ef8833d-2cde-4dae-b9d2-f77f18726a29" path="/var/lib/kubelet/pods/7ef8833d-2cde-4dae-b9d2-f77f18726a29/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.237827 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="822eb949-28b7-4487-9672-f4cd8dd3faa3" path="/var/lib/kubelet/pods/822eb949-28b7-4487-9672-f4cd8dd3faa3/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.238562 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88bc8b14-b04f-4062-b400-f8387184810e" path="/var/lib/kubelet/pods/88bc8b14-b04f-4062-b400-f8387184810e/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.240053 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaaef001-e178-4e64-80b1-e86fbd15ba8e" path="/var/lib/kubelet/pods/aaaef001-e178-4e64-80b1-e86fbd15ba8e/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.241565 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7cb8571-8a9d-469b-af65-6dfd59cafeab" path="/var/lib/kubelet/pods/c7cb8571-8a9d-469b-af65-6dfd59cafeab/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.242957 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4160ee3-8aac-4f22-b6c4-cce7d18781d6" path="/var/lib/kubelet/pods/d4160ee3-8aac-4f22-b6c4-cce7d18781d6/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.261583 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fef940f6-b480-49ee-a794-da81719669f8" path="/var/lib/kubelet/pods/fef940f6-b480-49ee-a794-da81719669f8/volumes" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262488 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262521 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"189a34a7-d451-4c26-b84e-5b056fe1e93b","Type":"ContainerDied","Data":"41798fc8e0958c48ed2f437a30c506cfe792d8e998ed9822c9c11928156963fe"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262543 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41798fc8e0958c48ed2f437a30c506cfe792d8e998ed9822c9c11928156963fe" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262558 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerDied","Data":"08cefbd86f57e35feb0652dfa8a1d0ef70620766258370c430d6aa69ef015c8f"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262574 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance2c9d-account-delete-jqhxv"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262590 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement4aed-account-delete-jwsnh"] Dec 15 
07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262604 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerDied","Data":"4df3f9d2120bea7d34cfae05a4f5df18902f5f757c6e24aab3dce17f1b9c115c"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262619 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerDied","Data":"cd961a36fd04b1c02785e7dd50ada2e1ee1120caf0386cf15d358851e661646b"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262634 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerDied","Data":"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262647 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerDied","Data":"cc294b31b47b3b2be756a6a68ef1afe2c1758bb4597cea8ea401001afc30deaa"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262660 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" event={"ID":"db2340b9-e33b-432a-a1e6-df022337da1c","Type":"ContainerDied","Data":"ef7fbf45baeea2399584cf009bc9639d23e5763a916d5051a8fd664fd8dbafa0"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262675 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bgdfk"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262687 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerDied","Data":"13335ca52da2d503c57740b99d9daf374ffc5018a3326ec34ac01d5b52c7213e"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262702 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerDied","Data":"77739227b2e4df94b91265646c063d9a27732d0f675c7fd40830a4b54d3a0c86"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262716 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262731 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-knn2n" event={"ID":"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1","Type":"ContainerDied","Data":"b7a27214e09139d172797f5205688fad1bc9ff2fd30ee9378f1540e7203b85e8"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262742 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7a27214e09139d172797f5205688fad1bc9ff2fd30ee9378f1540e7203b85e8" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262752 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerDied","Data":"4079f6a6d32eb7733edb32cf1b6f998b0b66e4e5adb9fa96b6a490f1de01b01b"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262779 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262796 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bgdfk"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262816 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5sgjf"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262829 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5sgjf"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262842 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262857 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262869 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262884 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderfc60-account-delete-vg9nk"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262897 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262908 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262918 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c"} Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.262933 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.269311 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" containerID="cri-o://7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.269424 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" 
containerName="nova-cell0-conductor-conductor" containerID="cri-o://3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.269517 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" containerID="cri-o://529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" gracePeriod=30 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.322126 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.322177 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.322210 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.322857 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.322905 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" gracePeriod=600 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.420294 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="rabbitmq" containerID="cri-o://ef11419ef47c4d37303e61eee95b4d049fd5985bb7603bfa53e9e8035128272c" gracePeriod=604800 Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.534996 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.566898 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:27 crc 
kubenswrapper[4876]: E1215 07:16:27.567968 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.574038 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.574118 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.674714 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="galera" containerID="cri-o://da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787" gracePeriod=29 Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.692794 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.701640 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-knn2n_87fa47d9-4ce3-4233-baf3-a5b86ab26dd1/openstack-network-exporter/0.log" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.701722 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.838899 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.838947 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.838987 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.839388 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.839955 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_189a34a7-d451-4c26-b84e-5b056fe1e93b/ovsdbserver-nb/0.log" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.840018 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.840276 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.840393 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.841215 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.841326 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87hh9\" (UniqueName: \"kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9\") pod \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\" (UID: \"87fa47d9-4ce3-4233-baf3-a5b86ab26dd1\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.843250 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config" (OuterVolumeSpecName: "config") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.843387 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.843412 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.843426 4876 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.848608 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8fc3485b-9f78-40d0-b864-b40626fdba7c/ovsdbserver-sb/0.log" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.848818 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.851013 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9" (OuterVolumeSpecName: "kube-api-access-87hh9") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "kube-api-access-87hh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.881248 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.887718 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.890909 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.900959 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:27 crc kubenswrapper[4876]: E1215 07:16:27.901010 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.907980 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.920595 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949363 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949399 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949424 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49bdz\" (UniqueName: \"kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949475 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949514 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949552 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949572 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949625 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949649 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949719 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949741 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949766 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949807 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmzm4\" (UniqueName: \"kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949830 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949844 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config\") pod \"8fc3485b-9f78-40d0-b864-b40626fdba7c\" (UID: \"8fc3485b-9f78-40d0-b864-b40626fdba7c\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.949868 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config\") pod \"189a34a7-d451-4c26-b84e-5b056fe1e93b\" (UID: \"189a34a7-d451-4c26-b84e-5b056fe1e93b\") " Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.952532 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87hh9\" (UniqueName: 
\"kubernetes.io/projected/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-kube-api-access-87hh9\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.952555 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.953324 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config" (OuterVolumeSpecName: "config") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.954897 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts" (OuterVolumeSpecName: "scripts") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.956897 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts" (OuterVolumeSpecName: "scripts") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.956995 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config" (OuterVolumeSpecName: "config") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.957278 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.957696 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:27 crc kubenswrapper[4876]: I1215 07:16:27.976796 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.023543 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.023767 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4" (OuterVolumeSpecName: "kube-api-access-zmzm4") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "kube-api-access-zmzm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.033817 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.036386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.036882 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz" (OuterVolumeSpecName: "kube-api-access-49bdz") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "kube-api-access-49bdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054610 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054712 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle\") pod \"e3ade031-559a-4a09-b707-ab081a659fc9\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054807 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054861 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config\") pod \"e3ade031-559a-4a09-b707-ab081a659fc9\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054890 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret\") pod \"e3ade031-559a-4a09-b707-ab081a659fc9\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054914 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-d78lc\" (UniqueName: \"kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054956 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.054985 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.055021 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc\") pod \"db2340b9-e33b-432a-a1e6-df022337da1c\" (UID: \"db2340b9-e33b-432a-a1e6-df022337da1c\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.055038 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9smdv\" (UniqueName: \"kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv\") pod \"e3ade031-559a-4a09-b707-ab081a659fc9\" (UID: \"e3ade031-559a-4a09-b707-ab081a659fc9\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.056937 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.056960 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.056980 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.056992 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49bdz\" (UniqueName: \"kubernetes.io/projected/8fc3485b-9f78-40d0-b864-b40626fdba7c-kube-api-access-49bdz\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057006 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057016 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057027 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057036 4876 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-zmzm4\" (UniqueName: \"kubernetes.io/projected/189a34a7-d451-4c26-b84e-5b056fe1e93b-kube-api-access-zmzm4\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057047 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/189a34a7-d451-4c26-b84e-5b056fe1e93b-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.057055 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc3485b-9f78-40d0-b864-b40626fdba7c-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.080338 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc" (OuterVolumeSpecName: "kube-api-access-d78lc") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "kube-api-access-d78lc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.087647 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv" (OuterVolumeSpecName: "kube-api-access-9smdv") pod "e3ade031-559a-4a09-b707-ab081a659fc9" (UID: "e3ade031-559a-4a09-b707-ab081a659fc9"). InnerVolumeSpecName "kube-api-access-9smdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.160341 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d78lc\" (UniqueName: \"kubernetes.io/projected/db2340b9-e33b-432a-a1e6-df022337da1c-kube-api-access-d78lc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.160372 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9smdv\" (UniqueName: \"kubernetes.io/projected/e3ade031-559a-4a09-b707-ab081a659fc9-kube-api-access-9smdv\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.160432 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.160477 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data podName:db3f4964-0cca-4527-93de-457292de4be7 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.160464032 +0000 UTC m=+1517.731606943 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data") pod "rabbitmq-server-0" (UID: "db3f4964-0cca-4527-93de-457292de4be7") : configmap "rabbitmq-config-data" not found Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.191032 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.230035 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.230090 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.230135 4876 scope.go:117] "RemoveContainer" containerID="f7ed19e30015229d68cc3e45a01331aa4c6921bd64f97228380c22118f785d84" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.230712 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.230922 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.235443 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.248206 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8fc3485b-9f78-40d0-b864-b40626fdba7c/ovsdbserver-sb/0.log" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.253797 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.255446 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8fc3485b-9f78-40d0-b864-b40626fdba7c","Type":"ContainerDied","Data":"6b96d651c4b9c1327100dfb83be8a8e30f1e39e1bb7266e282afd7bbcc80ac6b"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.262278 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data\") pod \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.262307 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs\") pod \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.262345 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs\") pod \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.262566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle\") pod \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.265133 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl2zf\" (UniqueName: \"kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf\") pod \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\" (UID: \"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738\") " Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.265650 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.273092 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.281645 4876 generic.go:334] "Generic (PLEG): container finished" podID="f7e97610-516d-4609-911c-53124ace7db0" containerID="409c22ba1b246c89eacddbba5e405955d8477034e987e355d0b9e54624d39f46" exitCode=143 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.281712 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerDied","Data":"409c22ba1b246c89eacddbba5e405955d8477034e987e355d0b9e54624d39f46"} Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.289037 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.289948 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.293946 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.296125 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.296261 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.297401 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" event={"ID":"db2340b9-e33b-432a-a1e6-df022337da1c","Type":"ContainerDied","Data":"7e98a905c7b247ec8087d10468e46383c2341416520a7e49dd62daaf5dd9164b"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.297464 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ddd577785-dsmf2" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.303367 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf" (OuterVolumeSpecName: "kube-api-access-cl2zf") pod "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" (UID: "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738"). InnerVolumeSpecName "kube-api-access-cl2zf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.304650 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerDied","Data":"673940f40218b378a3a021cc4f2020bd1bf2de102f6afab63a132d17f98cd088"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.300757 4876 generic.go:334] "Generic (PLEG): container finished" podID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerID="673940f40218b378a3a021cc4f2020bd1bf2de102f6afab63a132d17f98cd088" exitCode=143 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.320744 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona915-account-delete-gswz2" event={"ID":"d5c5b73f-18fa-4bef-b443-15328769a818","Type":"ContainerStarted","Data":"9be7ac597f2cf9e90dd302b2eeea88feed25d6c598eb222d04e5fb3791882102"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.334792 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2c9d-account-delete-jqhxv" event={"ID":"d24022bd-28ab-402d-9078-c52208891ef8","Type":"ContainerStarted","Data":"db9317417bd58e1f6f0e55391ae407bf3a99d22c99c1e41049173057f98cdb3b"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.334841 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2c9d-account-delete-jqhxv" event={"ID":"d24022bd-28ab-402d-9078-c52208891ef8","Type":"ContainerStarted","Data":"2d51fea6c33018b2ce10b6d031602046c199329f7659519f8190c59f711d5100"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.340488 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.367305 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.367344 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.367361 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl2zf\" (UniqueName: \"kubernetes.io/projected/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-kube-api-access-cl2zf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.368772 4876 secret.go:188] Couldn't get secret openstack/cinder-scripts: secret "cinder-scripts" not found Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.369335 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.369314759 +0000 UTC m=+1517.940457670 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-scripts" not found Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.368099 4876 secret.go:188] Couldn't get secret openstack/cinder-config-data: secret "cinder-config-data" not found Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.378047 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data podName:bcf47e8b-49d3-45cb-a496-b3a5a256cc5c nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.378016313 +0000 UTC m=+1517.949159224 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data") pod "cinder-api-0" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c") : secret "cinder-config-data" not found Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.379527 4876 generic.go:334] "Generic (PLEG): container finished" podID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerID="9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.379629 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerDied","Data":"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.393845 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance2c9d-account-delete-jqhxv" podStartSLOduration=4.393802729 podStartE2EDuration="4.393802729s" podCreationTimestamp="2025-12-15 07:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:28.377638094 +0000 UTC m=+1513.948781015" watchObservedRunningTime="2025-12-15 07:16:28.393802729 +0000 UTC m=+1513.964945650" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.397264 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerDied","Data":"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.397331 4876 generic.go:334] "Generic (PLEG): container finished" podID="79b75337-8832-415e-a91f-2f8edd407cf1" containerID="4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.398734 4876 generic.go:334] "Generic (PLEG): container finished" podID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" containerID="7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.398771 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738","Type":"ContainerDied","Data":"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.398788 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"ba4acd07-a5cb-47a7-9f1f-0bc818d3f738","Type":"ContainerDied","Data":"4f714935ef68bdd21888a723081d129d8c2a2d7d72387377eabd8eda80890662"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.398831 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.401300 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2a14-account-delete-mbwx7" event={"ID":"1501e554-af82-4179-959b-475a30b910d5","Type":"ContainerStarted","Data":"655465f7ba0c472bb7728b449b956869a7ac09d9d41976e75c800a71a4968690"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.404845 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement4aed-account-delete-jwsnh" event={"ID":"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71","Type":"ContainerStarted","Data":"49c1ed2ed8b735625be1e2057223049f7cfed2672d833f8281a8f3dd7362e83f"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.404870 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement4aed-account-delete-jwsnh" event={"ID":"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71","Type":"ContainerStarted","Data":"bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.407870 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.413847 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderfc60-account-delete-vg9nk" event={"ID":"233c0ff5-7edd-41c4-8e16-7de48b9fc76c","Type":"ContainerStarted","Data":"f0dfdb4d9e96656537226abcfccedda1e0b17902fdf09faf03792277c676a36e"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.413891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderfc60-account-delete-vg9nk" event={"ID":"233c0ff5-7edd-41c4-8e16-7de48b9fc76c","Type":"ContainerStarted","Data":"8276da630e544256d45873d05827f28ba9bafa519d18620c65e9e87192ea5b40"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.414159 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.428351 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.443072 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement4aed-account-delete-jwsnh" podStartSLOduration=4.443050859 podStartE2EDuration="4.443050859s" podCreationTimestamp="2025-12-15 07:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:28.428910837 +0000 UTC m=+1514.000053768" watchObservedRunningTime="2025-12-15 07:16:28.443050859 +0000 UTC m=+1514.014193770" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.469188 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.469227 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.482260 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinderfc60-account-delete-vg9nk" podStartSLOduration=4.482241117 podStartE2EDuration="4.482241117s" podCreationTimestamp="2025-12-15 07:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:28.449140553 +0000 UTC m=+1514.020283464" watchObservedRunningTime="2025-12-15 07:16:28.482241117 +0000 UTC m=+1514.053384018" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.491524 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3ade031-559a-4a09-b707-ab081a659fc9" (UID: "e3ade031-559a-4a09-b707-ab081a659fc9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502917 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502955 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502965 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502975 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502985 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.502994 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.503003 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9" exitCode=0 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.503081 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.503944 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.503983 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.503997 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.504009 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.504020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.504030 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.504041 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9"} Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.504087 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-knn2n" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.523493 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.536554 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.536801 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7657c647b5-xksrd" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-httpd" containerID="cri-o://476fd12dd3cb5e2c27a25b26e39e28f8bb6e1c10090223c4ed86166489cea477" gracePeriod=30 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.536965 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7657c647b5-xksrd" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-server" containerID="cri-o://39f90703ccba40f7e2a435957a70f4bff9580120e427f263883cc1f2fc48d1e8" gracePeriod=30 Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.563229 4876 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 15 07:16:28 crc kubenswrapper[4876]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-15T07:16:26Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 15 07:16:28 crc kubenswrapper[4876]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Dec 15 07:16:28 crc kubenswrapper[4876]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-pr47l" message=< Dec 15 07:16:28 crc kubenswrapper[4876]: Exiting ovn-controller (1) [FAILED] Dec 15 07:16:28 crc kubenswrapper[4876]: Killing ovn-controller (1) [ OK ] Dec 15 07:16:28 crc kubenswrapper[4876]: 2025-12-15T07:16:26Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 15 07:16:28 crc kubenswrapper[4876]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Dec 15 07:16:28 crc kubenswrapper[4876]: > Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.563296 4876 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 15 07:16:28 crc kubenswrapper[4876]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-12-15T07:16:26Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Dec 15 07:16:28 crc kubenswrapper[4876]: /etc/init.d/functions: line 589: 386 Alarm clock "$@" Dec 15 07:16:28 crc kubenswrapper[4876]: > pod="openstack/ovn-controller-pr47l" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" containerID="cri-o://d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.563340 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-pr47l" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" containerID="cri-o://d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" gracePeriod=27 Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.565298 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-pr47l" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" probeResult="failure" output="" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.582597 4876 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.592347 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.607588 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.607616 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.607633 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.614479 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" (UID: "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.710679 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e3ade031-559a-4a09-b707-ab081a659fc9" (UID: "e3ade031-559a-4a09-b707-ab081a659fc9"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.710918 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.710935 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.724729 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61240875-a8dd-44c2-9082-f9270de53161" path="/var/lib/kubelet/pods/61240875-a8dd-44c2-9082-f9270de53161/volumes" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.725446 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="686f7e40-e2a2-486b-83b0-4a0bc33f15e3" path="/var/lib/kubelet/pods/686f7e40-e2a2-486b-83b0-4a0bc33f15e3/volumes" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.766618 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data" (OuterVolumeSpecName: "config-data") pod "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" (UID: "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.781115 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.801426 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.815033 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.815060 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.815073 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.839176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.856930 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e3ade031-559a-4a09-b707-ab081a659fc9" (UID: "e3ade031-559a-4a09-b707-ab081a659fc9"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.856932 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "8fc3485b-9f78-40d0-b864-b40626fdba7c" (UID: "8fc3485b-9f78-40d0-b864-b40626fdba7c"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.864311 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" (UID: "87fa47d9-4ce3-4233-baf3-a5b86ab26dd1"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.884966 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config" (OuterVolumeSpecName: "config") pod "db2340b9-e33b-432a-a1e6-df022337da1c" (UID: "db2340b9-e33b-432a-a1e6-df022337da1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.899141 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" (UID: "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923527 4876 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923549 4876 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923557 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fc3485b-9f78-40d0-b864-b40626fdba7c-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923565 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3ade031-559a-4a09-b707-ab081a659fc9-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923573 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: I1215 07:16:28.923581 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db2340b9-e33b-432a-a1e6-df022337da1c-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.923632 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:28 crc kubenswrapper[4876]: E1215 07:16:28.923677 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data podName:2a1020a1-7afe-46ee-b5c4-40a9290a05e1 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.9236643 +0000 UTC m=+1518.494807211 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data") pod "rabbitmq-cell1-server-0" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1") : configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.007274 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "189a34a7-d451-4c26-b84e-5b056fe1e93b" (UID: "189a34a7-d451-4c26-b84e-5b056fe1e93b"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.022544 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" (UID: "ba4acd07-a5cb-47a7-9f1f-0bc818d3f738"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.025684 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/189a34a7-d451-4c26-b84e-5b056fe1e93b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.025727 4876 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.286334 4876 scope.go:117] "RemoveContainer" containerID="a317e6932e2fdc31885ac1215fbae30ebcc70d8d5b074c22c0c2b75c7095ebd6" Dec 15 07:16:29 crc kubenswrapper[4876]: E1215 07:16:29.286457 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951 is running failed: container process not found" containerID="3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:29 crc kubenswrapper[4876]: E1215 07:16:29.294629 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951 is running failed: container process not found" containerID="3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:29 crc kubenswrapper[4876]: E1215 07:16:29.300826 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951 is running failed: container process not found" containerID="3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:29 crc kubenswrapper[4876]: E1215 07:16:29.300884 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" containerName="nova-cell0-conductor-conductor" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.446573 4876 scope.go:117] "RemoveContainer" containerID="10e3e7fbcb8c5a26a5888a2b01f66cead7b43ca5c9e509f9062271dd99393b62" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.494519 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542379 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6cz5\" (UniqueName: \"kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542427 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542459 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542506 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542588 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542610 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542684 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.542715 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle\") pod \"1503039d-0445-46ac-81ca-5af528a46ce2\" (UID: \"1503039d-0445-46ac-81ca-5af528a46ce2\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.549914 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.551488 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.553357 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.559022 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.559745 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-pr47l_df0aba13-046d-4950-bfa3-c873c535847f/ovn-controller/0.log" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.559832 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.567273 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5" (OuterVolumeSpecName: "kube-api-access-h6cz5") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "kube-api-access-h6cz5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.645453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic196-account-delete-j7dqh" event={"ID":"478129d4-b31d-47e4-a6d4-b4aa7416356e","Type":"ContainerStarted","Data":"6a0be5594bf1164434d9a16c353a1598f9b8d028647a4bfad9be45335e2b960c"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649333 4876 scope.go:117] "RemoveContainer" containerID="ef7fbf45baeea2399584cf009bc9639d23e5763a916d5051a8fd664fd8dbafa0" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649485 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649563 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649606 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649666 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649777 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649847 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.649897 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv942\" (UniqueName: \"kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942\") pod \"df0aba13-046d-4950-bfa3-c873c535847f\" (UID: \"df0aba13-046d-4950-bfa3-c873c535847f\") " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650265 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650592 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6cz5\" (UniqueName: \"kubernetes.io/projected/1503039d-0445-46ac-81ca-5af528a46ce2-kube-api-access-h6cz5\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650625 4876 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650639 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650651 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650664 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.650680 4876 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1503039d-0445-46ac-81ca-5af528a46ce2-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.651443 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.651495 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run" (OuterVolumeSpecName: "var-run") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.651736 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts" (OuterVolumeSpecName: "scripts") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654217 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-pr47l_df0aba13-046d-4950-bfa3-c873c535847f/ovn-controller/0.log" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654248 4876 generic.go:334] "Generic (PLEG): container finished" podID="df0aba13-046d-4950-bfa3-c873c535847f" containerID="d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" exitCode=143 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654302 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l" event={"ID":"df0aba13-046d-4950-bfa3-c873c535847f","Type":"ContainerDied","Data":"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654325 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-pr47l" event={"ID":"df0aba13-046d-4950-bfa3-c873c535847f","Type":"ContainerDied","Data":"1595e26b4d2f02ef6954d967db7d10ff60f6e5959b4cdf59901ea1ce3b924c93"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654355 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-pr47l" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.654914 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.655335 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0d7a8-account-delete-265rv" event={"ID":"bd3ee522-830b-4cde-ad14-15bcc28b7d06","Type":"ContainerStarted","Data":"198fa3779f2d36503903806904e03b88ca096ae33da44949215b381f38d492eb"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.657305 4876 generic.go:334] "Generic (PLEG): container finished" podID="d24022bd-28ab-402d-9078-c52208891ef8" containerID="db9317417bd58e1f6f0e55391ae407bf3a99d22c99c1e41049173057f98cdb3b" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.657360 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2c9d-account-delete-jqhxv" event={"ID":"d24022bd-28ab-402d-9078-c52208891ef8","Type":"ContainerDied","Data":"db9317417bd58e1f6f0e55391ae407bf3a99d22c99c1e41049173057f98cdb3b"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.658598 4876 generic.go:334] "Generic (PLEG): container finished" podID="233c0ff5-7edd-41c4-8e16-7de48b9fc76c" containerID="f0dfdb4d9e96656537226abcfccedda1e0b17902fdf09faf03792277c676a36e" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.658633 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderfc60-account-delete-vg9nk" event={"ID":"233c0ff5-7edd-41c4-8e16-7de48b9fc76c","Type":"ContainerDied","Data":"f0dfdb4d9e96656537226abcfccedda1e0b17902fdf09faf03792277c676a36e"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.661821 4876 generic.go:334] "Generic (PLEG): container finished" podID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerID="39f90703ccba40f7e2a435957a70f4bff9580120e427f263883cc1f2fc48d1e8" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.661842 4876 generic.go:334] "Generic 
(PLEG): container finished" podID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerID="476fd12dd3cb5e2c27a25b26e39e28f8bb6e1c10090223c4ed86166489cea477" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.661901 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerDied","Data":"39f90703ccba40f7e2a435957a70f4bff9580120e427f263883cc1f2fc48d1e8"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.661920 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerDied","Data":"476fd12dd3cb5e2c27a25b26e39e28f8bb6e1c10090223c4ed86166489cea477"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.677861 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942" (OuterVolumeSpecName: "kube-api-access-bv942") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "kube-api-access-bv942". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.709152 4876 generic.go:334] "Generic (PLEG): container finished" podID="e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" containerID="49c1ed2ed8b735625be1e2057223049f7cfed2672d833f8281a8f3dd7362e83f" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.709578 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement4aed-account-delete-jwsnh" event={"ID":"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71","Type":"ContainerDied","Data":"49c1ed2ed8b735625be1e2057223049f7cfed2672d833f8281a8f3dd7362e83f"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.751351 4876 generic.go:334] "Generic (PLEG): container finished" podID="1503039d-0445-46ac-81ca-5af528a46ce2" containerID="da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.751429 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerDied","Data":"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.751453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1503039d-0445-46ac-81ca-5af528a46ce2","Type":"ContainerDied","Data":"7f3dd8520eb7cc36569d304f7135a2b54c44cbd7a13dbaf1858a9ca184329258"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.751507 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.754227 4876 generic.go:334] "Generic (PLEG): container finished" podID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" containerID="3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" exitCode=0 Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.754253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a01ad7f-0924-4b3d-ba95-b5e599f343ee","Type":"ContainerDied","Data":"3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951"} Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.755991 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv942\" (UniqueName: \"kubernetes.io/projected/df0aba13-046d-4950-bfa3-c873c535847f-kube-api-access-bv942\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.756027 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.756037 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df0aba13-046d-4950-bfa3-c873c535847f-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.756046 4876 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.756054 4876 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/df0aba13-046d-4950-bfa3-c873c535847f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.810768 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.857466 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.938681 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.958421 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:29 crc kubenswrapper[4876]: I1215 07:16:29.999309 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.004812 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "1503039d-0445-46ac-81ca-5af528a46ce2" (UID: "1503039d-0445-46ac-81ca-5af528a46ce2"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.060560 4876 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.060593 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1503039d-0445-46ac-81ca-5af528a46ce2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.098376 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "df0aba13-046d-4950-bfa3-c873c535847f" (UID: "df0aba13-046d-4950-bfa3-c873c535847f"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.116288 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.116607 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-central-agent" containerID="cri-o://34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.117038 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="proxy-httpd" containerID="cri-o://76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.117079 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="sg-core" containerID="cri-o://3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.117133 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-notification-agent" containerID="cri-o://48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.145620 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.167266 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.161:8776/healthcheck\": dial tcp 10.217.0.161:8776: connect: connection 
refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.167627 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/df0aba13-046d-4950-bfa3-c873c535847f-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.235160 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.235378 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerName="memcached" containerID="cri-o://90ed53ef014d70db18f442d59174c6772f199e8d4213675027ef5c79f6be1513" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.278175 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-94d2c"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.304496 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-8jtjq"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.310385 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": dial tcp 10.217.0.203:8775: connect: connection refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.323898 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-8jtjq"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.327161 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": dial tcp 10.217.0.203:8775: connect: connection refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.336331 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.376739 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-94d2c"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.397218 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.397554 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-8689b9f5b5-zrv9l" podUID="d724d787-7189-4c81-94bf-08c2904deaf9" containerName="keystone-api" containerID="cri-o://1332d0e465fbd1defca4d9cc7fb36daa61b5bbbf7ddc14584dbbe92e062e9452" gracePeriod=30 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.399307 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.402321 4876 scope.go:117] "RemoveContainer" containerID="e41e2134ca24c29bb330d58bfc1c524aefcc9b5b0be72e853c4d8698c1fef59b" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.418195 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478487 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478532 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whp4q\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478610 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478639 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle\") pod \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478666 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data\") pod \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478698 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rp9ff\" (UniqueName: \"kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff\") pod \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\" (UID: \"7a01ad7f-0924-4b3d-ba95-b5e599f343ee\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478798 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs\") pod 
\"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478881 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.478940 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.493139 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.494381 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.505033 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.529321 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ddd577785-dsmf2"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.536494 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.558964 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q" (OuterVolumeSpecName: "kube-api-access-whp4q") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "kube-api-access-whp4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.564492 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-knn2n"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.566781 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.576667 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.582616 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff" (OuterVolumeSpecName: "kube-api-access-rp9ff") pod "7a01ad7f-0924-4b3d-ba95-b5e599f343ee" (UID: "7a01ad7f-0924-4b3d-ba95-b5e599f343ee"). InnerVolumeSpecName "kube-api-access-rp9ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.599918 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-b6z9x"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.612796 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.612834 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/769bb0f8-96fe-485a-adfd-e51747bbff86-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.612872 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whp4q\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-kube-api-access-whp4q\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.612884 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rp9ff\" (UniqueName: \"kubernetes.io/projected/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-kube-api-access-rp9ff\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.612892 4876 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/769bb0f8-96fe-485a-adfd-e51747bbff86-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.615643 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.623000 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-b6z9x"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.630494 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.640821 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6475-account-create-update-v8dvd"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.646575 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.652716 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6475-account-create-update-v8dvd"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.666638 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.680125 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.681811 4876 scope.go:117] 
"RemoveContainer" containerID="7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.683864 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.730191 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" path="/var/lib/kubelet/pods/1503039d-0445-46ac-81ca-5af528a46ce2/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.730908 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" path="/var/lib/kubelet/pods/189a34a7-d451-4c26-b84e-5b056fe1e93b/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.731538 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24739a2a-2898-4b24-a9db-3845fbda2625" path="/var/lib/kubelet/pods/24739a2a-2898-4b24-a9db-3845fbda2625/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.734720 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="462339ed-ab46-4daf-b748-0257287f53cd" path="/var/lib/kubelet/pods/462339ed-ab46-4daf-b748-0257287f53cd/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.735315 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7657f9d3-81bb-41d0-9c40-43f697875a5e" path="/var/lib/kubelet/pods/7657f9d3-81bb-41d0-9c40-43f697875a5e/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.735891 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" path="/var/lib/kubelet/pods/87fa47d9-4ce3-4233-baf3-a5b86ab26dd1/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.740272 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" path="/var/lib/kubelet/pods/8fc3485b-9f78-40d0-b864-b40626fdba7c/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.740831 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" path="/var/lib/kubelet/pods/db2340b9-e33b-432a-a1e6-df022337da1c/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.742925 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3ade031-559a-4a09-b707-ab081a659fc9" path="/var/lib/kubelet/pods/e3ade031-559a-4a09-b707-ab081a659fc9/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.743697 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb7788fd-6d1c-4251-9629-2f550c34b522" path="/var/lib/kubelet/pods/fb7788fd-6d1c-4251-9629-2f550c34b522/volumes" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.772297 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7a01ad7f-0924-4b3d-ba95-b5e599f343ee" (UID: "7a01ad7f-0924-4b3d-ba95-b5e599f343ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.776159 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data" (OuterVolumeSpecName: "config-data") pod "7a01ad7f-0924-4b3d-ba95-b5e599f343ee" (UID: "7a01ad7f-0924-4b3d-ba95-b5e599f343ee"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.799499 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.816651 4876 generic.go:334] "Generic (PLEG): container finished" podID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerID="0adedfb29f59a78ed0ca95dedd5200ded03f6c4a9258dcf41996ef3271579c3d" exitCode=0 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.819221 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.819241 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.819252 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7a01ad7f-0924-4b3d-ba95-b5e599f343ee-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.822183 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8774998dd-ps8vx" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": dial tcp 10.217.0.162:9311: connect: connection refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.822236 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8774998dd-ps8vx" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": dial tcp 10.217.0.162:9311: connect: connection refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.824009 4876 generic.go:334] "Generic (PLEG): container finished" podID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerID="ab2b7bcd77dc096c6f8bd4c255e23eb26198e47b55d43f5eb7d82a47546b3194" exitCode=0 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.849547 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/neutrona915-account-delete-gswz2" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.857339 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutrona915-account-delete-gswz2" podStartSLOduration=6.857312397 podStartE2EDuration="6.857312397s" podCreationTimestamp="2025-12-15 07:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:30.850028741 +0000 UTC m=+1516.421171652" watchObservedRunningTime="2025-12-15 07:16:30.857312397 +0000 UTC m=+1516.428455308" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.878690 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.881314 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data" (OuterVolumeSpecName: "config-data") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.919609 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.920650 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") pod \"769bb0f8-96fe-485a-adfd-e51747bbff86\" (UID: \"769bb0f8-96fe-485a-adfd-e51747bbff86\") " Dec 15 07:16:30 crc kubenswrapper[4876]: W1215 07:16:30.921802 4876 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/769bb0f8-96fe-485a-adfd-e51747bbff86/volumes/kubernetes.io~secret/combined-ca-bundle Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.921845 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "769bb0f8-96fe-485a-adfd-e51747bbff86" (UID: "769bb0f8-96fe-485a-adfd-e51747bbff86"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.929254 4876 generic.go:334] "Generic (PLEG): container finished" podID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerID="1cb73a4bf7ed52b3476b312fe18978e8dfe39c5143fc358708eff5808b066bce" exitCode=0 Dec 15 07:16:30 crc kubenswrapper[4876]: E1215 07:16:30.931458 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:30 crc kubenswrapper[4876]: E1215 07:16:30.931525 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:31.43150545 +0000 UTC m=+1517.002648361 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.932092 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.932145 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.932156 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/769bb0f8-96fe-485a-adfd-e51747bbff86-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.939253 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/memcached-0" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.104:11211: connect: connection refused" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.941477 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novaapic196-account-delete-j7dqh" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.945991 4876 generic.go:334] "Generic (PLEG): container finished" podID="23569e4d-9d69-4947-9293-50d1667c1eda" containerID="cef34266913f1af42f423b9d794de57144b0ceaffd39d3e744bc8fb3a3cb3355" exitCode=0 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.958686 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapic196-account-delete-j7dqh" podStartSLOduration=5.958666942 podStartE2EDuration="5.958666942s" podCreationTimestamp="2025-12-15 07:16:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:30.958457397 +0000 UTC m=+1516.529600318" watchObservedRunningTime="2025-12-15 07:16:30.958666942 +0000 UTC m=+1516.529809853" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.974720 4876 generic.go:334] "Generic (PLEG): container finished" podID="efef2015-cf60-49ff-b90d-f1120822806c" containerID="76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9" exitCode=0 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.974780 4876 generic.go:334] "Generic (PLEG): container finished" podID="efef2015-cf60-49ff-b90d-f1120822806c" containerID="3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0" exitCode=2 Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.985797 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/barbican2a14-account-delete-mbwx7" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:30 crc kubenswrapper[4876]: I1215 07:16:30.993488 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0d7a8-account-delete-265rv" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.007361 4876 generic.go:334] "Generic (PLEG): container finished" podID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerID="cbf0f151002b45d509e231e069399143add0e99bc811fdc2edeef06787b39699" exitCode=0 Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.019469 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.024855 4876 generic.go:334] "Generic (PLEG): container finished" podID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerID="c47bf08f93b6c00bfb83bcd8d9986cba9e1dc9ea271a02a648096725810385b2" exitCode=0 Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025707 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025771 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025785 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerDied","Data":"0adedfb29f59a78ed0ca95dedd5200ded03f6c4a9258dcf41996ef3271579c3d"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025813 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerDied","Data":"ab2b7bcd77dc096c6f8bd4c255e23eb26198e47b55d43f5eb7d82a47546b3194"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025849 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona915-account-delete-gswz2" event={"ID":"d5c5b73f-18fa-4bef-b443-15328769a818","Type":"ContainerStarted","Data":"d0d09eace30f3a8203157172430444b7a9b0b3b017388393bd092a8a917b1184"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025864 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-pr47l"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025876 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerDied","Data":"1cb73a4bf7ed52b3476b312fe18978e8dfe39c5143fc358708eff5808b066bce"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025898 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic196-account-delete-j7dqh" event={"ID":"478129d4-b31d-47e4-a6d4-b4aa7416356e","Type":"ContainerStarted","Data":"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025931 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerDied","Data":"cef34266913f1af42f423b9d794de57144b0ceaffd39d3e744bc8fb3a3cb3355"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025947 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerDied","Data":"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025959 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerDied","Data":"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025967 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2a14-account-delete-mbwx7" 
event={"ID":"1501e554-af82-4179-959b-475a30b910d5","Type":"ContainerStarted","Data":"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.025978 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0d7a8-account-delete-265rv" event={"ID":"bd3ee522-830b-4cde-ad14-15bcc28b7d06","Type":"ContainerStarted","Data":"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.026011 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerDied","Data":"cbf0f151002b45d509e231e069399143add0e99bc811fdc2edeef06787b39699"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.026032 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"7a01ad7f-0924-4b3d-ba95-b5e599f343ee","Type":"ContainerDied","Data":"4b7607f7d670caa301b8b88efaf50178856aeda97660a02a0661fcdd231cbc96"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.026044 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerDied","Data":"c47bf08f93b6c00bfb83bcd8d9986cba9e1dc9ea271a02a648096725810385b2"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.031560 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7657c647b5-xksrd" event={"ID":"769bb0f8-96fe-485a-adfd-e51747bbff86","Type":"ContainerDied","Data":"ab9e566345df24e380959f893a007c698b900501f2115b87085d4c1e4ece3b84"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.031865 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7657c647b5-xksrd" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.036737 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.036797 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:16:31.536784601 +0000 UTC m=+1517.107927512 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.037313 4876 scope.go:117] "RemoveContainer" containerID="7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.039620 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican2a14-account-delete-mbwx7" podStartSLOduration=7.039594497 podStartE2EDuration="7.039594497s" podCreationTimestamp="2025-12-15 07:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:31.00598255 +0000 UTC m=+1516.577125461" watchObservedRunningTime="2025-12-15 07:16:31.039594497 +0000 UTC m=+1516.610737408" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.039985 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell0d7a8-account-delete-265rv" podStartSLOduration=6.039979637 podStartE2EDuration="6.039979637s" podCreationTimestamp="2025-12-15 07:16:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:16:31.029149485 +0000 UTC m=+1516.600292396" watchObservedRunningTime="2025-12-15 07:16:31.039979637 +0000 UTC m=+1516.611122548" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.043281 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1\": container with ID starting with 7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1 not found: ID does not exist" containerID="7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.043434 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1"} err="failed to get container status \"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1\": rpc error: code = NotFound desc = could not find container \"7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1\": container with ID starting with 7e218b31fee83d266ec13a864f3d7d18759dfade8c08826e97bb3964d943aeb1 not found: ID does not exist" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.043517 4876 scope.go:117] "RemoveContainer" containerID="5984af4749e3e3b071be6cbab85aa2dc4f95eb610ca9d34223b709f90b67f795" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.048487 4876 generic.go:334] "Generic (PLEG): container finished" podID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerID="c5bf69c338d2955649defd7611ec504382e5c3542edee5ca3f56d01332885ae6" exitCode=0 Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.048724 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerDied","Data":"c5bf69c338d2955649defd7611ec504382e5c3542edee5ca3f56d01332885ae6"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.048788 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"1639e7c7-83c0-4a60-9f59-5b31772b9f35","Type":"ContainerDied","Data":"84b1c1cdb1f7106884cf4f5ab085a85917d93186b5d58717c1abe18b0a027118"} Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.048804 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b1c1cdb1f7106884cf4f5ab085a85917d93186b5d58717c1abe18b0a027118" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.051133 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.053537 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" containerName="kube-state-metrics" containerID="cri-o://b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c" gracePeriod=30 Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.056324 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.056369 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.056600 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.061407 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.061608 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.061698 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.077785 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="galera" containerID="cri-o://cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a" gracePeriod=30 Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.080146 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.095618 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.095700 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138174 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138232 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws5vq\" (UniqueName: \"kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138269 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138290 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138355 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138416 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138494 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.138522 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"1639e7c7-83c0-4a60-9f59-5b31772b9f35\" (UID: 
\"1639e7c7-83c0-4a60-9f59-5b31772b9f35\") " Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.139045 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.139123 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:31.639090752 +0000 UTC m=+1517.210233663 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.139674 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.139864 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.139926 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:31.639908125 +0000 UTC m=+1517.211051036 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.141218 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs" (OuterVolumeSpecName: "logs") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.149266 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq" (OuterVolumeSpecName: "kube-api-access-ws5vq") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "kube-api-access-ws5vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.152419 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts" (OuterVolumeSpecName: "scripts") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.155486 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.198627 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.212311 4876 scope.go:117] "RemoveContainer" containerID="d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.227666 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data" (OuterVolumeSpecName: "config-data") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.230505 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1639e7c7-83c0-4a60-9f59-5b31772b9f35" (UID: "1639e7c7-83c0-4a60-9f59-5b31772b9f35"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.244409 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle\") pod \"83083084-55fd-4e95-87bf-bebcc2d41fb8\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.244559 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data\") pod \"83083084-55fd-4e95-87bf-bebcc2d41fb8\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.244700 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs\") pod \"83083084-55fd-4e95-87bf-bebcc2d41fb8\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.245153 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmx8z\" (UniqueName: \"kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z\") pod \"83083084-55fd-4e95-87bf-bebcc2d41fb8\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.245237 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs\") pod \"83083084-55fd-4e95-87bf-bebcc2d41fb8\" (UID: \"83083084-55fd-4e95-87bf-bebcc2d41fb8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.247795 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs" (OuterVolumeSpecName: "logs") pod "83083084-55fd-4e95-87bf-bebcc2d41fb8" (UID: "83083084-55fd-4e95-87bf-bebcc2d41fb8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251023 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251158 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251179 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251194 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws5vq\" (UniqueName: \"kubernetes.io/projected/1639e7c7-83c0-4a60-9f59-5b31772b9f35-kube-api-access-ws5vq\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251237 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251228 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z" (OuterVolumeSpecName: "kube-api-access-zmx8z") pod "83083084-55fd-4e95-87bf-bebcc2d41fb8" (UID: "83083084-55fd-4e95-87bf-bebcc2d41fb8"). InnerVolumeSpecName "kube-api-access-zmx8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251251 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251309 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1639e7c7-83c0-4a60-9f59-5b31772b9f35-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251321 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/83083084-55fd-4e95-87bf-bebcc2d41fb8-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.251331 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1639e7c7-83c0-4a60-9f59-5b31772b9f35-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.270411 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data" (OuterVolumeSpecName: "config-data") pod "83083084-55fd-4e95-87bf-bebcc2d41fb8" (UID: "83083084-55fd-4e95-87bf-bebcc2d41fb8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.279095 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.308457 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83083084-55fd-4e95-87bf-bebcc2d41fb8" (UID: "83083084-55fd-4e95-87bf-bebcc2d41fb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.338673 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "83083084-55fd-4e95-87bf-bebcc2d41fb8" (UID: "83083084-55fd-4e95-87bf-bebcc2d41fb8"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.354213 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmx8z\" (UniqueName: \"kubernetes.io/projected/83083084-55fd-4e95-87bf-bebcc2d41fb8-kube-api-access-zmx8z\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.354253 4876 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.354263 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.354277 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83083084-55fd-4e95-87bf-bebcc2d41fb8-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.354288 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.398046 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.415907 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.418544 4876 scope.go:117] "RemoveContainer" containerID="d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.423609 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489\": container with ID starting with d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489 not found: ID does not exist" containerID="d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.423666 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489"} err="failed to get container status \"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489\": rpc error: code = NotFound desc = could not find container \"d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489\": container with ID starting with d05504c99b3b0e84578e9bea6fa1859a3894d5a80a5a1d863ed78c8605a78489 not found: ID does not exist" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.423712 4876 scope.go:117] "RemoveContainer" containerID="da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.445322 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.454873 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.454954 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455018 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455132 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455167 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455246 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455291 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455314 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.455361 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzsm2\" (UniqueName: \"kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2\") pod \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\" (UID: \"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c\") " Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.455891 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.455958 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.455942133 +0000 UTC m=+1518.027085044 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.456148 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs" (OuterVolumeSpecName: "logs") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.459976 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.475771 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.486498 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2" (OuterVolumeSpecName: "kube-api-access-kzsm2") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "kube-api-access-kzsm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.487173 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts" (OuterVolumeSpecName: "scripts") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.490633 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.499997 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.504282 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.508099 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.513696 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.520807 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-7657c647b5-xksrd"] Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.529778 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.551176 4876 scope.go:117] "RemoveContainer" containerID="4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557191 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557272 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557335 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557395 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557433 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557502 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557550 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557578 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq924\" (UniqueName: \"kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557666 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557748 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557830 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ghhv\" (UniqueName: \"kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.557992 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz82f\" (UniqueName: \"kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.563160 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs" (OuterVolumeSpecName: "logs") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.563680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs" (OuterVolumeSpecName: "logs") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.569419 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.571584 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.573687 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574227 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574273 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574306 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574342 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574370 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574419 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574491 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574515 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574541 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574575 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs\") pod \"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\" (UID: 
\"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574617 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574647 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs\") pod \"23569e4d-9d69-4947-9293-50d1667c1eda\" (UID: \"23569e4d-9d69-4947-9293-50d1667c1eda\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574681 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dhnx\" (UniqueName: \"kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574720 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574743 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts\") pod \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\" (UID: \"4f937a46-5b8e-4d1c-bd05-3b729ffb8188\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.574783 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs\") pod \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\" (UID: \"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.576442 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f" (OuterVolumeSpecName: "kube-api-access-qz82f") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "kube-api-access-qz82f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.576681 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.576971 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.57694207 +0000 UTC m=+1518.148085051 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.576621 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577538 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577631 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577715 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577771 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts" (OuterVolumeSpecName: "scripts") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577797 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577853 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzsm2\" (UniqueName: \"kubernetes.io/projected/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-kube-api-access-kzsm2\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577868 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577880 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577889 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577903 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.577930 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.580864 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs" (OuterVolumeSpecName: "logs") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.581936 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.582033 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs" (OuterVolumeSpecName: "logs") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.590615 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924" (OuterVolumeSpecName: "kube-api-access-fq924") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "kube-api-access-fq924". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.610847 4876 scope.go:117] "RemoveContainer" containerID="da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.616704 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787\": container with ID starting with da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787 not found: ID does not exist" containerID="da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.616757 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787"} err="failed to get container status \"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787\": rpc error: code = NotFound desc = could not find container \"da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787\": container with ID starting with da02f14d88cfd6dc98cc98116f89b5480318272188ffd05e2273a30bf40c7787 not found: ID does not exist" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.616794 4876 scope.go:117] "RemoveContainer" containerID="4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.618845 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). 
InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.619236 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx" (OuterVolumeSpecName: "kube-api-access-8dhnx") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "kube-api-access-8dhnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.621016 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts" (OuterVolumeSpecName: "scripts") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.621320 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac\": container with ID starting with 4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac not found: ID does not exist" containerID="4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.621353 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac"} err="failed to get container status \"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac\": rpc error: code = NotFound desc = could not find container \"4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac\": container with ID starting with 4f61712cc5d0a938e8145fd61f8baf621bfd380af98312fe4ed5ba25fe9725ac not found: ID does not exist" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.621390 4876 scope.go:117] "RemoveContainer" containerID="3bb0efb06fe07164b0691cb647be7ce8ec5dd48a52e915accc7b411331387951" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.652517 4876 scope.go:117] "RemoveContainer" containerID="39f90703ccba40f7e2a435957a70f4bff9580120e427f263883cc1f2fc48d1e8" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.680390 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.687571 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data" (OuterVolumeSpecName: "config-data") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.691219 4876 scope.go:117] "RemoveContainer" containerID="476fd12dd3cb5e2c27a25b26e39e28f8bb6e1c10090223c4ed86166489cea477" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.695018 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv" (OuterVolumeSpecName: "kube-api-access-2ghhv") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "kube-api-access-2ghhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703140 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/23569e4d-9d69-4947-9293-50d1667c1eda-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703170 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dhnx\" (UniqueName: \"kubernetes.io/projected/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-kube-api-access-8dhnx\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703179 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703187 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703196 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703212 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703221 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq924\" (UniqueName: \"kubernetes.io/projected/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-kube-api-access-fq924\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703229 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ghhv\" (UniqueName: \"kubernetes.io/projected/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-kube-api-access-2ghhv\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703238 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz82f\" (UniqueName: \"kubernetes.io/projected/23569e4d-9d69-4947-9293-50d1667c1eda-kube-api-access-qz82f\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703250 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703258 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.703267 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.703308 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.703329 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.703398 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.703366511 +0000 UTC m=+1518.274509412 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: E1215 07:16:31.703419 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:32.703411342 +0000 UTC m=+1518.274554253 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.780528 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.814639 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.834071 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frhbb\" (UniqueName: \"kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb\") pod \"d24022bd-28ab-402d-9078-c52208891ef8\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.834202 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts\") pod \"d24022bd-28ab-402d-9078-c52208891ef8\" (UID: \"d24022bd-28ab-402d-9078-c52208891ef8\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.835262 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.838316 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d24022bd-28ab-402d-9078-c52208891ef8" (UID: "d24022bd-28ab-402d-9078-c52208891ef8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.843512 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data" (OuterVolumeSpecName: "config-data") pod "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" (UID: "bcf47e8b-49d3-45cb-a496-b3a5a256cc5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.847306 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.874194 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb" (OuterVolumeSpecName: "kube-api-access-frhbb") pod "d24022bd-28ab-402d-9078-c52208891ef8" (UID: "d24022bd-28ab-402d-9078-c52208891ef8"). InnerVolumeSpecName "kube-api-access-frhbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.918741 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936346 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdnqn\" (UniqueName: \"kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936419 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936569 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936612 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936699 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.936753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle\") pod \"79b75337-8832-415e-a91f-2f8edd407cf1\" (UID: \"79b75337-8832-415e-a91f-2f8edd407cf1\") " Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.937161 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frhbb\" (UniqueName: \"kubernetes.io/projected/d24022bd-28ab-402d-9078-c52208891ef8-kube-api-access-frhbb\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.937178 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d24022bd-28ab-402d-9078-c52208891ef8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.937191 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.937203 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.937395 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.942629 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.956056 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.964529 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data" (OuterVolumeSpecName: "config-data") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.967090 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.990544 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn" (OuterVolumeSpecName: "kube-api-access-jdnqn") pod "79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "kube-api-access-jdnqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:31 crc kubenswrapper[4876]: I1215 07:16:31.991511 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts" (OuterVolumeSpecName: "scripts") pod "79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.000200 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042142 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts\") pod \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042216 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config\") pod \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042255 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts\") pod \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042307 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbtzd\" (UniqueName: \"kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd\") pod \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042358 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs\") pod \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042393 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle\") pod \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\" (UID: \"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042526 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86xkg\" (UniqueName: \"kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg\") pod \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\" (UID: \"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.042556 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dv2d\" (UniqueName: \"kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d\") pod \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\" (UID: \"233c0ff5-7edd-41c4-8e16-7de48b9fc76c\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.043184 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.043225 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc 
kubenswrapper[4876]: I1215 07:16:32.043236 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdnqn\" (UniqueName: \"kubernetes.io/projected/79b75337-8832-415e-a91f-2f8edd407cf1-kube-api-access-jdnqn\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.043247 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/79b75337-8832-415e-a91f-2f8edd407cf1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.043257 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.055144 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" (UID: "e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.055645 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "233c0ff5-7edd-41c4-8e16-7de48b9fc76c" (UID: "233c0ff5-7edd-41c4-8e16-7de48b9fc76c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.070281 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d" (OuterVolumeSpecName: "kube-api-access-6dv2d") pod "233c0ff5-7edd-41c4-8e16-7de48b9fc76c" (UID: "233c0ff5-7edd-41c4-8e16-7de48b9fc76c"). InnerVolumeSpecName "kube-api-access-6dv2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.071272 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd" (OuterVolumeSpecName: "kube-api-access-jbtzd") pod "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" (UID: "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1"). InnerVolumeSpecName "kube-api-access-jbtzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.087520 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg" (OuterVolumeSpecName: "kube-api-access-86xkg") pod "e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" (UID: "e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71"). InnerVolumeSpecName "kube-api-access-86xkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.089853 4876 generic.go:334] "Generic (PLEG): container finished" podID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" containerID="b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c" exitCode=2 Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.089926 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1","Type":"ContainerDied","Data":"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.089953 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1","Type":"ContainerDied","Data":"56a4d3eb557314a531f157c23561cd2f095d0d9f484d0172a4eeb015f9fd4a5d"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.090019 4876 scope.go:117] "RemoveContainer" containerID="b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.090143 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.134573 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.198:3000/\": dial tcp 10.217.0.198:3000: connect: connection refused" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.136190 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement4aed-account-delete-jwsnh" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.136191 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement4aed-account-delete-jwsnh" event={"ID":"e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71","Type":"ContainerDied","Data":"bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.136229 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcb218540fc6cdde8ce41d1a56c550b5201a8636db083165b266cd207dbbcd67" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.147847 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86xkg\" (UniqueName: \"kubernetes.io/projected/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-kube-api-access-86xkg\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.147842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4f937a46-5b8e-4d1c-bd05-3b729ffb8188","Type":"ContainerDied","Data":"fd802d888676953a87165e66a9bd7497052ebe529b4b735a20d73e66e0777c08"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.147882 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dv2d\" (UniqueName: \"kubernetes.io/projected/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-kube-api-access-6dv2d\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.147947 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 
07:16:32.147961 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/233c0ff5-7edd-41c4-8e16-7de48b9fc76c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.147974 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbtzd\" (UniqueName: \"kubernetes.io/projected/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-api-access-jbtzd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.148032 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.158661 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data" (OuterVolumeSpecName: "config-data") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.160128 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc226c84-f3f6-47dc-ba09-3a79dd00e5d0","Type":"ContainerDied","Data":"754597aeae041d754951a2abf4d6bfa1c686422b63743e07a650429860fd06d5"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.160716 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.167158 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.173302 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"dd0403aa-07db-43b1-8df6-6317130cbd53","Type":"ContainerDied","Data":"90ed53ef014d70db18f442d59174c6772f199e8d4213675027ef5c79f6be1513"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.173719 4876 scope.go:117] "RemoveContainer" containerID="b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.174166 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c\": container with ID starting with b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c not found: ID does not exist" containerID="b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.174167 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerID="90ed53ef014d70db18f442d59174c6772f199e8d4213675027ef5c79f6be1513" exitCode=0 Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.174210 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c"} err="failed to get container status \"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c\": rpc error: code = NotFound desc = could not find container \"b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c\": container with ID starting with b2fff834a4096d326d632e7348e4886f9af85e3ce176a635eab5cf623968ba4c not found: ID does not exist" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.174236 4876 scope.go:117] "RemoveContainer" containerID="1cb73a4bf7ed52b3476b312fe18978e8dfe39c5143fc358708eff5808b066bce" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.190836 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.191360 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.191361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bcf47e8b-49d3-45cb-a496-b3a5a256cc5c","Type":"ContainerDied","Data":"20f681bac97da50dac7df49648f99624d7612e3cbad3be4f3eb9482344be9566"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.193680 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance2c9d-account-delete-jqhxv" event={"ID":"d24022bd-28ab-402d-9078-c52208891ef8","Type":"ContainerDied","Data":"2d51fea6c33018b2ce10b6d031602046c199329f7659519f8190c59f711d5100"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.193717 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d51fea6c33018b2ce10b6d031602046c199329f7659519f8190c59f711d5100" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.193799 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance2c9d-account-delete-jqhxv" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.200290 4876 generic.go:334] "Generic (PLEG): container finished" podID="79b75337-8832-415e-a91f-2f8edd407cf1" containerID="b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783" exitCode=0 Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.200367 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerDied","Data":"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.200403 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"79b75337-8832-415e-a91f-2f8edd407cf1","Type":"ContainerDied","Data":"5dce8b66da2c593d37c1adf1f95b57024e34e70055c7bf421ab38aaa9fe488ea"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.200505 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.202753 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8774998dd-ps8vx" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.202751 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8774998dd-ps8vx" event={"ID":"3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774","Type":"ContainerDied","Data":"83d2138d13a0ffbdeaef69ef7b046714395ed24e1202d23adc8389cc650b351f"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.214882 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderfc60-account-delete-vg9nk" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.214908 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderfc60-account-delete-vg9nk" event={"ID":"233c0ff5-7edd-41c4-8e16-7de48b9fc76c","Type":"ContainerDied","Data":"8276da630e544256d45873d05827f28ba9bafa519d18620c65e9e87192ea5b40"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.214961 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8276da630e544256d45873d05827f28ba9bafa519d18620c65e9e87192ea5b40" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.217500 4876 scope.go:117] "RemoveContainer" containerID="77739227b2e4df94b91265646c063d9a27732d0f675c7fd40830a4b54d3a0c86" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.221345 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.221691 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"83083084-55fd-4e95-87bf-bebcc2d41fb8","Type":"ContainerDied","Data":"9f94543edadc892f0dac7a43a7361d73c20fc0fd0776d3e250d7e03db91feefe"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.221785 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.238396 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64c85ddd54-vd84c" event={"ID":"23569e4d-9d69-4947-9293-50d1667c1eda","Type":"ContainerDied","Data":"b7cee017897c12539c9299fd75ae6631f0e2df1da620d7ed092d9f4e6c534e80"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.238603 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-64c85ddd54-vd84c" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.244686 4876 generic.go:334] "Generic (PLEG): container finished" podID="efef2015-cf60-49ff-b90d-f1120822806c" containerID="34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1" exitCode=0 Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.244801 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerDied","Data":"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1"} Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.244878 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.246326 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell0d7a8-account-delete-265rv" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.246428 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/neutrona915-account-delete-gswz2" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.247131 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapic196-account-delete-j7dqh" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.251064 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.251225 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.251241 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.251255 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.251498 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.252805 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data podName:db3f4964-0cca-4527-93de-457292de4be7 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:40.251541107 +0000 UTC m=+1525.822684028 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data") pod "rabbitmq-server-0" (UID: "db3f4964-0cca-4527-93de-457292de4be7") : configmap "rabbitmq-config-data" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.267676 4876 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/barbican2a14-account-delete-mbwx7" secret="" err="secret \"galera-openstack-dockercfg-q4blc\" not found" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.301869 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data" (OuterVolumeSpecName: "config-data") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.301902 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4f937a46-5b8e-4d1c-bd05-3b729ffb8188" (UID: "4f937a46-5b8e-4d1c-bd05-3b729ffb8188"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.305193 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" (UID: "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.324555 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" (UID: "cc226c84-f3f6-47dc-ba09-3a79dd00e5d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.337897 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" (UID: "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353652 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353681 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f937a46-5b8e-4d1c-bd05-3b729ffb8188-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353690 4876 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353701 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353710 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.353814 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" (UID: "3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.364279 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.366070 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3eb0262_ff07_4e0a_8e3b_9b147ccf7e71.slice\": RecentStats: unable to find data in memory cache]" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.369247 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" (UID: "39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.408648 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.419191 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.424737 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data" (OuterVolumeSpecName: "config-data") pod "79b75337-8832-415e-a91f-2f8edd407cf1" (UID: "79b75337-8832-415e-a91f-2f8edd407cf1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.448116 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "23569e4d-9d69-4947-9293-50d1667c1eda" (UID: "23569e4d-9d69-4947-9293-50d1667c1eda"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457029 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457056 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457067 4876 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457080 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457090 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457098 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23569e4d-9d69-4947-9293-50d1667c1eda-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.457124 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79b75337-8832-415e-a91f-2f8edd407cf1-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.457187 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.457247 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:34.457230968 +0000 UTC m=+1520.028373879 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.529892 4876 scope.go:117] "RemoveContainer" containerID="c47bf08f93b6c00bfb83bcd8d9986cba9e1dc9ea271a02a648096725810385b2" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.545348 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.570301 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.578290 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.591744 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.591961 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.592371 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.592678 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.608420 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.614493 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.622339 4876 scope.go:117] "RemoveContainer" containerID="673940f40218b378a3a021cc4f2020bd1bf2de102f6afab63a132d17f98cd088" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.641197 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.656222 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.659574 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data\") pod \"dd0403aa-07db-43b1-8df6-6317130cbd53\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.659744 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle\") pod \"dd0403aa-07db-43b1-8df6-6317130cbd53\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.659949 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config\") pod \"dd0403aa-07db-43b1-8df6-6317130cbd53\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.660024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs\") pod \"dd0403aa-07db-43b1-8df6-6317130cbd53\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.660173 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjvhq\" (UniqueName: \"kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq\") pod \"dd0403aa-07db-43b1-8df6-6317130cbd53\" (UID: \"dd0403aa-07db-43b1-8df6-6317130cbd53\") " Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.660617 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.663325 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:16:34.66330726 +0000 UTC m=+1520.234450171 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.663642 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.666946 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data" (OuterVolumeSpecName: "config-data") pod "dd0403aa-07db-43b1-8df6-6317130cbd53" (UID: "dd0403aa-07db-43b1-8df6-6317130cbd53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.674577 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "dd0403aa-07db-43b1-8df6-6317130cbd53" (UID: "dd0403aa-07db-43b1-8df6-6317130cbd53"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.684273 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.691270 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.692779 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq" (OuterVolumeSpecName: "kube-api-access-gjvhq") pod "dd0403aa-07db-43b1-8df6-6317130cbd53" (UID: "dd0403aa-07db-43b1-8df6-6317130cbd53"). InnerVolumeSpecName "kube-api-access-gjvhq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.693152 4876 scope.go:117] "RemoveContainer" containerID="0adedfb29f59a78ed0ca95dedd5200ded03f6c4a9258dcf41996ef3271579c3d" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.813352 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.815594 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjvhq\" (UniqueName: \"kubernetes.io/projected/dd0403aa-07db-43b1-8df6-6317130cbd53-kube-api-access-gjvhq\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.819172 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.819303 4876 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/dd0403aa-07db-43b1-8df6-6317130cbd53-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.820993 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.897058 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:34.897028728 +0000 UTC m=+1520.468171639 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.897119 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd0403aa-07db-43b1-8df6-6317130cbd53" (UID: "dd0403aa-07db-43b1-8df6-6317130cbd53"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.830602 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.898063 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:34.898046184 +0000 UTC m=+1520.469189095 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.897295 4876 scope.go:117] "RemoveContainer" containerID="13335ca52da2d503c57740b99d9daf374ffc5018a3326ec34ac01d5b52c7213e" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.837542 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" path="/var/lib/kubelet/pods/39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.903077 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.926879 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" path="/var/lib/kubelet/pods/4f937a46-5b8e-4d1c-bd05-3b729ffb8188/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.928434 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" path="/var/lib/kubelet/pods/769bb0f8-96fe-485a-adfd-e51747bbff86/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.942059 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" path="/var/lib/kubelet/pods/7a01ad7f-0924-4b3d-ba95-b5e599f343ee/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.954154 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" path="/var/lib/kubelet/pods/ba4acd07-a5cb-47a7-9f1f-0bc818d3f738/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.954487 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.954585 4876 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.954643 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data podName:2a1020a1-7afe-46ee-b5c4-40a9290a05e1 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:40.954627112 +0000 UTC m=+1526.525770023 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data") pod "rabbitmq-cell1-server-0" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1") : configmap "rabbitmq-cell1-config-data" not found Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.961071 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" path="/var/lib/kubelet/pods/bcf47e8b-49d3-45cb-a496-b3a5a256cc5c/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.965131 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.966196 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" path="/var/lib/kubelet/pods/cc226c84-f3f6-47dc-ba09-3a79dd00e5d0/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.966591 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 15 07:16:32 crc kubenswrapper[4876]: E1215 07:16:32.966664 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.967775 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df0aba13-046d-4950-bfa3-c873c535847f" path="/var/lib/kubelet/pods/df0aba13-046d-4950-bfa3-c873c535847f/volumes" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.969716 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "dd0403aa-07db-43b1-8df6-6317130cbd53" (UID: "dd0403aa-07db-43b1-8df6-6317130cbd53"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.972487 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.972535 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:16:32 crc kubenswrapper[4876]: I1215 07:16:32.972550 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.000385 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.024315 4876 scope.go:117] "RemoveContainer" containerID="4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.039204 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-64c85ddd54-vd84c"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.055846 4876 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0403aa-07db-43b1-8df6-6317130cbd53-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.063524 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.072822 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-8774998dd-ps8vx"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.073246 4876 scope.go:117] "RemoveContainer" containerID="b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.079238 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.089621 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.112934 4876 scope.go:117] "RemoveContainer" containerID="4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.115859 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.122674 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91\": container with ID starting with 4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91 not found: ID does not exist" containerID="4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.122724 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91"} err="failed to get container status \"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91\": rpc error: code = NotFound desc = could not find container \"4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91\": 
container with ID starting with 4b7f386293f27b7f82a48d11c680cf4b171cc5fc4b435d5ebbbd59c77d020b91 not found: ID does not exist" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.122756 4876 scope.go:117] "RemoveContainer" containerID="b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783" Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.123020 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783\": container with ID starting with b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783 not found: ID does not exist" containerID="b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.123039 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783"} err="failed to get container status \"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783\": rpc error: code = NotFound desc = could not find container \"b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783\": container with ID starting with b73059a67da5978091f62409cc2d618cec5700661f3033362eae72d121c56783 not found: ID does not exist" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.123054 4876 scope.go:117] "RemoveContainer" containerID="ab2b7bcd77dc096c6f8bd4c255e23eb26198e47b55d43f5eb7d82a47546b3194" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.157168 4876 scope.go:117] "RemoveContainer" containerID="4df3f9d2120bea7d34cfae05a4f5df18902f5f757c6e24aab3dce17f1b9c115c" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.183226 4876 scope.go:117] "RemoveContainer" containerID="cbf0f151002b45d509e231e069399143add0e99bc811fdc2edeef06787b39699" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.214891 4876 scope.go:117] "RemoveContainer" containerID="4079f6a6d32eb7733edb32cf1b6f998b0b66e4e5adb9fa96b6a490f1de01b01b" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.261822 4876 scope.go:117] "RemoveContainer" containerID="cef34266913f1af42f423b9d794de57144b0ceaffd39d3e744bc8fb3a3cb3355" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.273931 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.274195 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"dd0403aa-07db-43b1-8df6-6317130cbd53","Type":"ContainerDied","Data":"11d6ca3e7efc70b6417baa19734580193698a947f639b46c4ed43c34cf18c17d"} Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.281123 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.282348 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.294951 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 07:16:33 crc kubenswrapper[4876]: E1215 07:16:33.295013 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.299397 4876 scope.go:117] "RemoveContainer" containerID="08cefbd86f57e35feb0652dfa8a1d0ef70620766258370c430d6aa69ef015c8f" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.573714 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.579008 4876 scope.go:117] "RemoveContainer" containerID="90ed53ef014d70db18f442d59174c6772f199e8d4213675027ef5c79f6be1513" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.580998 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.733940 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fbe2efa0-0b59-42cb-93d7-3540f4b03613/ovn-northd/0.log" Dec 15 07:16:33 crc kubenswrapper[4876]: I1215 07:16:33.734007 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.875220 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.875817 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.875928 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.876078 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5nh2\" (UniqueName: \"kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.876143 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.876197 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.876221 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config\") pod \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\" (UID: \"fbe2efa0-0b59-42cb-93d7-3540f4b03613\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.878247 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts" (OuterVolumeSpecName: "scripts") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.878303 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config" (OuterVolumeSpecName: "config") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.878589 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.885659 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2" (OuterVolumeSpecName: "kube-api-access-r5nh2") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "kube-api-access-r5nh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.902695 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.926321 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.955485 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.976280 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.977967 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.978011 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5nh2\" (UniqueName: \"kubernetes.io/projected/fbe2efa0-0b59-42cb-93d7-3540f4b03613-kube-api-access-r5nh2\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.978020 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.978028 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbe2efa0-0b59-42cb-93d7-3540f4b03613-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.978037 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.978045 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:33.993071 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fbe2efa0-0b59-42cb-93d7-3540f4b03613" (UID: "fbe2efa0-0b59-42cb-93d7-3540f4b03613"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078826 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078856 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078881 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078900 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078936 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078955 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.078972 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079027 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079060 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: 
\"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079086 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079138 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m725c\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079162 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079229 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnc4x\" (UniqueName: \"kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079263 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079292 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf\") pod \"db3f4964-0cca-4527-93de-457292de4be7\" (UID: \"db3f4964-0cca-4527-93de-457292de4be7\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079327 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079356 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079377 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle\") pod \"8a65a162-9f80-4d1c-be2c-001dedcb5391\" (UID: \"8a65a162-9f80-4d1c-be2c-001dedcb5391\") " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.079699 4876 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fbe2efa0-0b59-42cb-93d7-3540f4b03613-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc 
kubenswrapper[4876]: I1215 07:16:34.084647 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.084892 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.084966 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.085514 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.085755 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.086469 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.087242 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.088987 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.089263 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.089409 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x" (OuterVolumeSpecName: "kube-api-access-vnc4x") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "kube-api-access-vnc4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.091473 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info" (OuterVolumeSpecName: "pod-info") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.091529 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c" (OuterVolumeSpecName: "kube-api-access-m725c") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "kube-api-access-m725c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.096548 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.097188 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.113465 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data" (OuterVolumeSpecName: "config-data") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.114831 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.129237 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf" (OuterVolumeSpecName: "server-conf") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.144451 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "8a65a162-9f80-4d1c-be2c-001dedcb5391" (UID: "8a65a162-9f80-4d1c-be2c-001dedcb5391"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.180684 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "db3f4964-0cca-4527-93de-457292de4be7" (UID: "db3f4964-0cca-4527-93de-457292de4be7"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181149 4876 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181169 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m725c\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-kube-api-access-m725c\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181182 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181195 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnc4x\" (UniqueName: \"kubernetes.io/projected/8a65a162-9f80-4d1c-be2c-001dedcb5391-kube-api-access-vnc4x\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181205 4876 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181215 4876 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-server-conf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181244 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181256 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-generated\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc 
kubenswrapper[4876]: I1215 07:16:34.181269 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181281 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181296 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181309 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181321 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/db3f4964-0cca-4527-93de-457292de4be7-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181332 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181342 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8a65a162-9f80-4d1c-be2c-001dedcb5391-config-data-default\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181355 4876 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a65a162-9f80-4d1c-be2c-001dedcb5391-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181366 4876 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/db3f4964-0cca-4527-93de-457292de4be7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181378 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/db3f4964-0cca-4527-93de-457292de4be7-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.181389 4876 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/db3f4964-0cca-4527-93de-457292de4be7-pod-info\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.196849 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.197898 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.283181 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.283206 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.292777 4876 generic.go:334] "Generic (PLEG): container finished" podID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerID="cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a" exitCode=0 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.292832 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.292845 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerDied","Data":"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.292871 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"8a65a162-9f80-4d1c-be2c-001dedcb5391","Type":"ContainerDied","Data":"08e1864a7cf2d74fc6e6349c0836d270dd02e0735a8de0b438415c24433a1c6b"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.292888 4876 scope.go:117] "RemoveContainer" containerID="cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.298249 4876 generic.go:334] "Generic (PLEG): container finished" podID="db3f4964-0cca-4527-93de-457292de4be7" containerID="0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42" exitCode=0 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.298292 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.298326 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerDied","Data":"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.298690 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"db3f4964-0cca-4527-93de-457292de4be7","Type":"ContainerDied","Data":"38358edb6ed8d461080eae3715ff7851a148aed9711f8e564697fb2e343f3648"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.303800 4876 generic.go:334] "Generic (PLEG): container finished" podID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerID="ef11419ef47c4d37303e61eee95b4d049fd5985bb7603bfa53e9e8035128272c" exitCode=0 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.303870 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerDied","Data":"ef11419ef47c4d37303e61eee95b4d049fd5985bb7603bfa53e9e8035128272c"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.306192 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fbe2efa0-0b59-42cb-93d7-3540f4b03613/ovn-northd/0.log" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.306256 4876 generic.go:334] "Generic (PLEG): container finished" podID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" exitCode=139 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.306360 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.306909 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerDied","Data":"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.306943 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fbe2efa0-0b59-42cb-93d7-3540f4b03613","Type":"ContainerDied","Data":"3f49bdc523e736103d24c8ab8b44ce7193b48c3e5191b3da9c18fae0590c95fa"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.310153 4876 generic.go:334] "Generic (PLEG): container finished" podID="d724d787-7189-4c81-94bf-08c2904deaf9" containerID="1332d0e465fbd1defca4d9cc7fb36daa61b5bbbf7ddc14584dbbe92e062e9452" exitCode=0 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.310174 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8689b9f5b5-zrv9l" event={"ID":"d724d787-7189-4c81-94bf-08c2904deaf9","Type":"ContainerDied","Data":"1332d0e465fbd1defca4d9cc7fb36daa61b5bbbf7ddc14584dbbe92e062e9452"} Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.331453 4876 scope.go:117] "RemoveContainer" containerID="8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.347414 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.359771 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.366731 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.374112 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.382658 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.389531 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.396086 4876 scope.go:117] "RemoveContainer" containerID="cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.396635 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a\": container with ID starting with cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a not found: ID does not exist" containerID="cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.396668 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a"} err="failed to get container status \"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a\": rpc error: code = NotFound desc = could not find container \"cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a\": container with ID starting with cbb1e158a7bd7e45773b3e447d4d4e0cc61cf6b38e63ceeaf02df08c11a8683a not found: ID does not exist" Dec 15 07:16:34 
crc kubenswrapper[4876]: I1215 07:16:34.396696 4876 scope.go:117] "RemoveContainer" containerID="8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.396964 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207\": container with ID starting with 8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207 not found: ID does not exist" containerID="8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.397013 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207"} err="failed to get container status \"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207\": rpc error: code = NotFound desc = could not find container \"8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207\": container with ID starting with 8384384dd697d69c7ce40635efed35639957f4cdb2d00470f5bc539b06d2f207 not found: ID does not exist" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.397050 4876 scope.go:117] "RemoveContainer" containerID="0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.416096 4876 scope.go:117] "RemoveContainer" containerID="8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.455455 4876 scope.go:117] "RemoveContainer" containerID="0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.458368 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42\": container with ID starting with 0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42 not found: ID does not exist" containerID="0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.458407 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42"} err="failed to get container status \"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42\": rpc error: code = NotFound desc = could not find container \"0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42\": container with ID starting with 0888395f95f9e2f852fe7395353d85d55b224cc77f87853e9e77aee3de4cab42 not found: ID does not exist" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.458438 4876 scope.go:117] "RemoveContainer" containerID="8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.458851 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f\": container with ID starting with 8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f not found: ID does not exist" containerID="8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.458873 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f"} err="failed to get container status \"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f\": rpc error: code = NotFound desc = could not find container \"8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f\": container with ID starting with 8a880c29d2b8b980bc6f25420920ccc7a20d30e5e920a559a51db438e95a6e5f not found: ID does not exist" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.458887 4876 scope.go:117] "RemoveContainer" containerID="c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.484040 4876 scope.go:117] "RemoveContainer" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.486138 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.486212 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:38.486195248 +0000 UTC m=+1524.057338159 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.503476 4876 scope.go:117] "RemoveContainer" containerID="c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.504058 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626\": container with ID starting with c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626 not found: ID does not exist" containerID="c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.504310 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626"} err="failed to get container status \"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626\": rpc error: code = NotFound desc = could not find container \"c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626\": container with ID starting with c00ec4540da3c0c04eaa637c66d103505015613ecc0c5d2803d2d54a88322626 not found: ID does not exist" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.504348 4876 scope.go:117] "RemoveContainer" containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.504763 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1\": container with ID starting with 8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1 not found: ID does not exist" 
containerID="8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.504789 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1"} err="failed to get container status \"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1\": rpc error: code = NotFound desc = could not find container \"8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1\": container with ID starting with 8d21cad39a5775b70d2d322bef5056c0862b8edd130652cfe7781c182ebf59a1 not found: ID does not exist" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.588660 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-qnfks"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.602925 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-qnfks"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.610730 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance2c9d-account-delete-jqhxv"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.633622 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2c9d-account-create-update-nlmt8"] Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.690345 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.690413 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:16:38.690398259 +0000 UTC m=+1524.261541170 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.696099 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance2c9d-account-delete-jqhxv"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.748902 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" path="/var/lib/kubelet/pods/1639e7c7-83c0-4a60-9f59-5b31772b9f35/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.756621 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" path="/var/lib/kubelet/pods/23569e4d-9d69-4947-9293-50d1667c1eda/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.757233 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" path="/var/lib/kubelet/pods/3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.757756 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" path="/var/lib/kubelet/pods/79b75337-8832-415e-a91f-2f8edd407cf1/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.758947 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" path="/var/lib/kubelet/pods/83083084-55fd-4e95-87bf-bebcc2d41fb8/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.759686 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" path="/var/lib/kubelet/pods/8a65a162-9f80-4d1c-be2c-001dedcb5391/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.760758 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9acc04fe-227b-4cfa-90d2-5d12001c6706" path="/var/lib/kubelet/pods/9acc04fe-227b-4cfa-90d2-5d12001c6706/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.761486 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d24022bd-28ab-402d-9078-c52208891ef8" path="/var/lib/kubelet/pods/d24022bd-28ab-402d-9078-c52208891ef8/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.762224 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db3f4964-0cca-4527-93de-457292de4be7" path="/var/lib/kubelet/pods/db3f4964-0cca-4527-93de-457292de4be7/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.799982 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" path="/var/lib/kubelet/pods/dd0403aa-07db-43b1-8df6-6317130cbd53/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.800854 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" path="/var/lib/kubelet/pods/fbe2efa0-0b59-42cb-93d7-3540f4b03613/volumes" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818053 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2c9d-account-create-update-nlmt8"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818097 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-6l294"] Dec 15 07:16:34 
crc kubenswrapper[4876]: I1215 07:16:34.818135 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-6l294"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818147 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement4aed-account-delete-jwsnh"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818157 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement4aed-account-delete-jwsnh"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818166 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-4aed-account-create-update-l827q"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818174 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-4aed-account-create-update-l827q"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818183 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-bsw24"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.818192 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-bsw24"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.830208 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderfc60-account-delete-vg9nk"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.871201 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinderfc60-account-delete-vg9nk"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.895170 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-fc60-account-create-update-q7j8n"] Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.899541 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.899591 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:38.899578214 +0000 UTC m=+1524.470721115 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.899841 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: E1215 07:16:34.899867 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:38.899860662 +0000 UTC m=+1524.471003573 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.910898 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.913216 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-fc60-account-create-update-q7j8n"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.924187 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-qrp2v"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.936888 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-qrp2v"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.968240 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-2a14-account-create-update-98zfz"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.981924 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.982170 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican2a14-account-delete-mbwx7" podUID="1501e554-af82-4179-959b-475a30b910d5" containerName="mariadb-account-delete" containerID="cri-o://9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2" gracePeriod=30 Dec 15 07:16:34 crc kubenswrapper[4876]: I1215 07:16:34.983770 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000147 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-2a14-account-create-update-98zfz"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000812 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000832 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000876 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000891 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000938 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.000952 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.001005 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.001022 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.001076 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.001115 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtrfs\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs\") pod \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\" (UID: \"2a1020a1-7afe-46ee-b5c4-40a9290a05e1\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.005865 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs" (OuterVolumeSpecName: "kube-api-access-qtrfs") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "kube-api-access-qtrfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.006278 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.006711 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.015743 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-b45dn"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.021212 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.027307 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.029502 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-b45dn"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.031289 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.031455 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.031544 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info" (OuterVolumeSpecName: "pod-info") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.053767 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data" (OuterVolumeSpecName: "config-data") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.062147 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.062373 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutrona915-account-delete-gswz2" podUID="d5c5b73f-18fa-4bef-b443-15328769a818" containerName="mariadb-account-delete" containerID="cri-o://d0d09eace30f3a8203157172430444b7a9b0b3b017388393bd092a8a917b1184" gracePeriod=30 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.085414 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a915-account-create-update-rtgq8"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.091777 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf" (OuterVolumeSpecName: "server-conf") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.103307 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a915-account-create-update-rtgq8"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104078 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104137 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104173 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104207 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104230 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104295 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmz8z\" (UniqueName: \"kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104344 
4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104364 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys\") pod \"d724d787-7189-4c81-94bf-08c2904deaf9\" (UID: \"d724d787-7189-4c81-94bf-08c2904deaf9\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104657 4876 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-pod-info\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104672 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtrfs\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-kube-api-access-qtrfs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104683 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104692 4876 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104700 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104707 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104724 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104734 4876 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104742 4876 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-server-conf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.104751 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.108242 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys" 
(OuterVolumeSpecName: "fernet-keys") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.111222 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z" (OuterVolumeSpecName: "kube-api-access-mmz8z") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "kube-api-access-mmz8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.114322 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts" (OuterVolumeSpecName: "scripts") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.124332 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.130235 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.138895 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.145113 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-2ht2h"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.168272 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-2ht2h"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.173178 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.195315 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2a1020a1-7afe-46ee-b5c4-40a9290a05e1" (UID: "2a1020a1-7afe-46ee-b5c4-40a9290a05e1"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.198628 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data" (OuterVolumeSpecName: "config-data") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.202459 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d724d787-7189-4c81-94bf-08c2904deaf9" (UID: "d724d787-7189-4c81-94bf-08c2904deaf9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206493 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmz8z\" (UniqueName: \"kubernetes.io/projected/d724d787-7189-4c81-94bf-08c2904deaf9-kube-api-access-mmz8z\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206517 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206529 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206541 4876 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206552 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206563 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206573 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2a1020a1-7afe-46ee-b5c4-40a9290a05e1-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206583 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206593 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.206603 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d724d787-7189-4c81-94bf-08c2904deaf9-scripts\") on node 
\"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.207285 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-c196-account-create-update-629fc"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.214589 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.214844 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novaapic196-account-delete-j7dqh" podUID="478129d4-b31d-47e4-a6d4-b4aa7416356e" containerName="mariadb-account-delete" containerID="cri-o://a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4" gracePeriod=30 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.222614 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-c196-account-create-update-629fc"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.230033 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-cfvxh"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.237612 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-cfvxh"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.247340 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.247533 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell0d7a8-account-delete-265rv" podUID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" containerName="mariadb-account-delete" containerID="cri-o://4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d" gracePeriod=30 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.259069 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d7a8-account-create-update-9kn7n"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.259581 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d7a8-account-create-update-9kn7n"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.321488 4876 generic.go:334] "Generic (PLEG): container finished" podID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" exitCode=0 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.321543 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2","Type":"ContainerDied","Data":"7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.327751 4876 generic.go:334] "Generic (PLEG): container finished" podID="f7e97610-516d-4609-911c-53124ace7db0" containerID="906e12a4e7207c08cbd8d84deaf12b6e44402bd7fad151f831fb9bbc8d378ea9" exitCode=0 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.327791 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerDied","Data":"906e12a4e7207c08cbd8d84deaf12b6e44402bd7fad151f831fb9bbc8d378ea9"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.342453 4876 generic.go:334] "Generic (PLEG): container finished" podID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" exitCode=0 Dec 
15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.342535 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7","Type":"ContainerDied","Data":"529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.346671 4876 generic.go:334] "Generic (PLEG): container finished" podID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerID="5d1b2d9f1bf51ee65493ed30ecd5e5bab31cd11e0866d010e9194c3abf215bce" exitCode=0 Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.346712 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerDied","Data":"5d1b2d9f1bf51ee65493ed30ecd5e5bab31cd11e0866d010e9194c3abf215bce"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.361076 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2a1020a1-7afe-46ee-b5c4-40a9290a05e1","Type":"ContainerDied","Data":"646c1828ce0e55cffe3ebf28236178105e80559b0ade6662412e0643eb8104fe"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.361214 4876 scope.go:117] "RemoveContainer" containerID="ef11419ef47c4d37303e61eee95b4d049fd5985bb7603bfa53e9e8035128272c" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.361329 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.366172 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-8689b9f5b5-zrv9l" event={"ID":"d724d787-7189-4c81-94bf-08c2904deaf9","Type":"ContainerDied","Data":"3d42f3f033fbb11378e4eef1568eca239a8d49b3bbcb47d9876068405c492a6c"} Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.366272 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-8689b9f5b5-zrv9l" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.413618 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.441406 4876 scope.go:117] "RemoveContainer" containerID="654aa2b251a3e8d15baaeefdef4e09e6982ddf90bc92e310071d54b23d02d13f" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.441576 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.468867 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.491187 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.497299 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.507692 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-8689b9f5b5-zrv9l"] Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.509852 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs\") pod \"f7e97610-516d-4609-911c-53124ace7db0\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.509911 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qdrv\" (UniqueName: \"kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv\") pod \"f7e97610-516d-4609-911c-53124ace7db0\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.509946 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle\") pod \"f7e97610-516d-4609-911c-53124ace7db0\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.509964 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data\") pod \"f7e97610-516d-4609-911c-53124ace7db0\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.510076 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom\") pod \"f7e97610-516d-4609-911c-53124ace7db0\" (UID: \"f7e97610-516d-4609-911c-53124ace7db0\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.510546 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs" (OuterVolumeSpecName: "logs") pod "f7e97610-516d-4609-911c-53124ace7db0" (UID: "f7e97610-516d-4609-911c-53124ace7db0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.513224 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f7e97610-516d-4609-911c-53124ace7db0" (UID: "f7e97610-516d-4609-911c-53124ace7db0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.516211 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv" (OuterVolumeSpecName: "kube-api-access-7qdrv") pod "f7e97610-516d-4609-911c-53124ace7db0" (UID: "f7e97610-516d-4609-911c-53124ace7db0"). InnerVolumeSpecName "kube-api-access-7qdrv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.520209 4876 scope.go:117] "RemoveContainer" containerID="1332d0e465fbd1defca4d9cc7fb36daa61b5bbbf7ddc14584dbbe92e062e9452" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.557490 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7e97610-516d-4609-911c-53124ace7db0" (UID: "f7e97610-516d-4609-911c-53124ace7db0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.575455 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data" (OuterVolumeSpecName: "config-data") pod "f7e97610-516d-4609-911c-53124ace7db0" (UID: "f7e97610-516d-4609-911c-53124ace7db0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.591504 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.614895 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs\") pod \"3b7ba44e-d87f-4b10-a601-eb425af47a70\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.614998 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle\") pod \"3b7ba44e-d87f-4b10-a601-eb425af47a70\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2fg6\" (UniqueName: \"kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6\") pod \"3b7ba44e-d87f-4b10-a601-eb425af47a70\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615067 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data\") pod \"3b7ba44e-d87f-4b10-a601-eb425af47a70\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615092 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") pod \"3b7ba44e-d87f-4b10-a601-eb425af47a70\" (UID: \"3b7ba44e-d87f-4b10-a601-eb425af47a70\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615429 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615445 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7e97610-516d-4609-911c-53124ace7db0-logs\") on node 
\"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615454 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qdrv\" (UniqueName: \"kubernetes.io/projected/f7e97610-516d-4609-911c-53124ace7db0-kube-api-access-7qdrv\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615465 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.615473 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7e97610-516d-4609-911c-53124ace7db0-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.618722 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3b7ba44e-d87f-4b10-a601-eb425af47a70" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.619757 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6" (OuterVolumeSpecName: "kube-api-access-s2fg6") pod "3b7ba44e-d87f-4b10-a601-eb425af47a70" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70"). InnerVolumeSpecName "kube-api-access-s2fg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.619998 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs" (OuterVolumeSpecName: "logs") pod "3b7ba44e-d87f-4b10-a601-eb425af47a70" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.661856 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data" (OuterVolumeSpecName: "config-data") pod "3b7ba44e-d87f-4b10-a601-eb425af47a70" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.668575 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b7ba44e-d87f-4b10-a601-eb425af47a70" (UID: "3b7ba44e-d87f-4b10-a601-eb425af47a70"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.715852 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k4ww\" (UniqueName: \"kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww\") pod \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.715976 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data\") pod \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716042 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle\") pod \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\" (UID: \"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716341 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716357 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2fg6\" (UniqueName: \"kubernetes.io/projected/3b7ba44e-d87f-4b10-a601-eb425af47a70-kube-api-access-s2fg6\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716366 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716374 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b7ba44e-d87f-4b10-a601-eb425af47a70-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.716383 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b7ba44e-d87f-4b10-a601-eb425af47a70-logs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.726225 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww" (OuterVolumeSpecName: "kube-api-access-5k4ww") pod "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" (UID: "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7"). InnerVolumeSpecName "kube-api-access-5k4ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.744242 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data" (OuterVolumeSpecName: "config-data") pod "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" (UID: "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.744464 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.752711 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" (UID: "60c4c5c4-fe8f-45a7-8d0a-78976c3383a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.817990 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data\") pod \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.818052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh98q\" (UniqueName: \"kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q\") pod \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.818151 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle\") pod \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\" (UID: \"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2\") " Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.818467 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.818486 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.818496 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k4ww\" (UniqueName: \"kubernetes.io/projected/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7-kube-api-access-5k4ww\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.822822 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q" (OuterVolumeSpecName: "kube-api-access-jh98q") pod "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" (UID: "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2"). InnerVolumeSpecName "kube-api-access-jh98q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.844535 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" (UID: "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.848867 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data" (OuterVolumeSpecName: "config-data") pod "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" (UID: "8ecca0e4-3e21-43a7-90ce-9235fb7e46a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.920303 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.920338 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:35 crc kubenswrapper[4876]: I1215 07:16:35.920346 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh98q\" (UniqueName: \"kubernetes.io/projected/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2-kube-api-access-jh98q\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.048339 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.048806 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.049280 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.049317 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.050369 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.052580 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.056358 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.056437 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.135945 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224308 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224357 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224404 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224439 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224528 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224579 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc 
kubenswrapper[4876]: I1215 07:16:36.224624 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ssng\" (UniqueName: \"kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224672 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd\") pod \"efef2015-cf60-49ff-b90d-f1120822806c\" (UID: \"efef2015-cf60-49ff-b90d-f1120822806c\") " Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.224992 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.225089 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.225165 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.229194 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng" (OuterVolumeSpecName: "kube-api-access-4ssng") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "kube-api-access-4ssng". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.235331 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts" (OuterVolumeSpecName: "scripts") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.255902 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.290201 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.295959 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.314074 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data" (OuterVolumeSpecName: "config-data") pod "efef2015-cf60-49ff-b90d-f1120822806c" (UID: "efef2015-cf60-49ff-b90d-f1120822806c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326079 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326141 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ssng\" (UniqueName: \"kubernetes.io/projected/efef2015-cf60-49ff-b90d-f1120822806c-kube-api-access-4ssng\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326156 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efef2015-cf60-49ff-b90d-f1120822806c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326167 4876 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326179 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326189 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.326200 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efef2015-cf60-49ff-b90d-f1120822806c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.377052 4876 generic.go:334] "Generic (PLEG): container finished" podID="efef2015-cf60-49ff-b90d-f1120822806c" containerID="48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9" exitCode=0 Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.377136 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.377142 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerDied","Data":"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.377177 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efef2015-cf60-49ff-b90d-f1120822806c","Type":"ContainerDied","Data":"b1cabee56756691db02fdfd485f6fa75c9725c2dc24de542512d61abf72fe975"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.377195 4876 scope.go:117] "RemoveContainer" containerID="76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.379877 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"60c4c5c4-fe8f-45a7-8d0a-78976c3383a7","Type":"ContainerDied","Data":"4742792aa81c700479e739cf48000abc06720885c34fa9b25570ba7d3c44d69d"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.379955 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.383817 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"8ecca0e4-3e21-43a7-90ce-9235fb7e46a2","Type":"ContainerDied","Data":"e6e5150701fcca7fbae0b016e8dcc307c5c0db80ec9e85b4e9bc18b48616403c"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.383887 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.386411 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" event={"ID":"f7e97610-516d-4609-911c-53124ace7db0","Type":"ContainerDied","Data":"66dd6fa899aeaeb83ada6d4a9fbf7fac24e229f085219e79458f7c009d6a5203"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.386478 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-56dc944b8b-zz758" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.389697 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7bffd5564f-rlt89" event={"ID":"3b7ba44e-d87f-4b10-a601-eb425af47a70","Type":"ContainerDied","Data":"6cf64ea9fa713416542750a45774c2028b76b5c52ae3b2b2bf1c9122e9bb5bc6"} Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.389821 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7bffd5564f-rlt89" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.404765 4876 scope.go:117] "RemoveContainer" containerID="3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.433326 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.433401 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.459299 4876 scope.go:117] "RemoveContainer" containerID="48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.471967 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.495030 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.512281 4876 scope.go:117] "RemoveContainer" containerID="34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.529170 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.552191 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-56dc944b8b-zz758"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.565166 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.580081 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-7bffd5564f-rlt89"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.584251 4876 scope.go:117] "RemoveContainer" containerID="76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9" Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.593287 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9\": container with ID starting with 76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9 not found: ID does not exist" containerID="76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.593337 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9"} err="failed to get container status \"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9\": rpc error: code = NotFound desc = could not find container \"76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9\": container with ID starting with 76b060d6546fb5cd64a95810df1d802c7cdd0367633fee5e22bcdc08d01766c9 not found: ID does not exist" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.593366 4876 scope.go:117] "RemoveContainer" containerID="3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0" Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.599988 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0\": container with ID starting with 3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0 not found: ID does not exist" containerID="3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.600036 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0"} err="failed to get container status \"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0\": rpc error: code = NotFound desc = could not find container \"3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0\": container with ID starting with 3510bb0cf4d41441b4ff62c8cf6c058e6adf5ad943f60c3eaf3d9dd368368aa0 not found: ID does not exist" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.600069 4876 scope.go:117] "RemoveContainer" containerID="48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.604186 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.608266 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.610097 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9\": container with ID starting with 48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9 not found: ID does not exist" containerID="48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.610168 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9"} err="failed to get container status \"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9\": rpc error: code = NotFound desc = could not find container \"48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9\": container with ID starting with 48100921574b5b6f4754587e4b6a19eef7a299a070e10e1d014b58018a5dfee9 not found: ID does not exist" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.610221 4876 scope.go:117] "RemoveContainer" containerID="34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1" Dec 15 07:16:36 crc kubenswrapper[4876]: E1215 07:16:36.617556 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1\": container with ID starting with 34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1 not found: ID does not exist" containerID="34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.617604 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1"} err="failed to get container status \"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1\": rpc error: code = NotFound desc = could not find container \"34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1\": container with ID 
starting with 34f08fad522da1b768f2588f521ae95237b2d9767f153d92799b100011ac14c1 not found: ID does not exist" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.617631 4876 scope.go:117] "RemoveContainer" containerID="529e700e46a55990579283a5f090774aba503bed0f2a720fc800bca1e0556b1f" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.666489 4876 scope.go:117] "RemoveContainer" containerID="7d5bb22b16fd1b930f8ef31f1056aea9bdf8f1978c5829e28143892d55758233" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.686453 4876 scope.go:117] "RemoveContainer" containerID="906e12a4e7207c08cbd8d84deaf12b6e44402bd7fad151f831fb9bbc8d378ea9" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.710182 4876 scope.go:117] "RemoveContainer" containerID="409c22ba1b246c89eacddbba5e405955d8477034e987e355d0b9e54624d39f46" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.713461 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15fd8b8b-f8fc-4b22-b367-0197ec641fa9" path="/var/lib/kubelet/pods/15fd8b8b-f8fc-4b22-b367-0197ec641fa9/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.714079 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="233c0ff5-7edd-41c4-8e16-7de48b9fc76c" path="/var/lib/kubelet/pods/233c0ff5-7edd-41c4-8e16-7de48b9fc76c/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.714757 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" path="/var/lib/kubelet/pods/2a1020a1-7afe-46ee-b5c4-40a9290a05e1/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.715867 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fd1ac31-1a68-498e-b4af-af562ad7acd3" path="/var/lib/kubelet/pods/2fd1ac31-1a68-498e-b4af-af562ad7acd3/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.716412 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33d12d16-080f-4799-bae3-497babf7078f" path="/var/lib/kubelet/pods/33d12d16-080f-4799-bae3-497babf7078f/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.716969 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" path="/var/lib/kubelet/pods/3b7ba44e-d87f-4b10-a601-eb425af47a70/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.718001 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56ef7f70-9cff-4db0-a699-f6b3496ed677" path="/var/lib/kubelet/pods/56ef7f70-9cff-4db0-a699-f6b3496ed677/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.718654 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd" path="/var/lib/kubelet/pods/58a82fa6-c3d8-42a1-a9c3-5f73f1030dcd/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.719393 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58f80b2c-85ec-4f49-976c-6bc8510e1fdb" path="/var/lib/kubelet/pods/58f80b2c-85ec-4f49-976c-6bc8510e1fdb/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.719900 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59ba3714-4516-4744-8efb-604800685bba" path="/var/lib/kubelet/pods/59ba3714-4516-4744-8efb-604800685bba/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.720407 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" 
path="/var/lib/kubelet/pods/60c4c5c4-fe8f-45a7-8d0a-78976c3383a7/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.721375 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72dd7822-885f-4b25-9843-39d598af3697" path="/var/lib/kubelet/pods/72dd7822-885f-4b25-9843-39d598af3697/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.721813 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" path="/var/lib/kubelet/pods/8ecca0e4-3e21-43a7-90ce-9235fb7e46a2/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.722359 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a502b9f7-3c6f-43fb-a1e7-b55ade6448a9" path="/var/lib/kubelet/pods/a502b9f7-3c6f-43fb-a1e7-b55ade6448a9/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.723265 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab6f16e6-ce19-4399-806b-e3d25e47cb5f" path="/var/lib/kubelet/pods/ab6f16e6-ce19-4399-806b-e3d25e47cb5f/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.723835 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b" path="/var/lib/kubelet/pods/b00bf5d6-e50c-41c6-ba57-b39a84d1bc6b/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.724390 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfed7402-4a97-4071-86db-ba4324dbe01d" path="/var/lib/kubelet/pods/bfed7402-4a97-4071-86db-ba4324dbe01d/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.725275 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d724d787-7189-4c81-94bf-08c2904deaf9" path="/var/lib/kubelet/pods/d724d787-7189-4c81-94bf-08c2904deaf9/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.725783 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9783849-3311-44bf-a6e0-126a3c1c6c9c" path="/var/lib/kubelet/pods/d9783849-3311-44bf-a6e0-126a3c1c6c9c/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.726258 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" path="/var/lib/kubelet/pods/e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.726763 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efef2015-cf60-49ff-b90d-f1120822806c" path="/var/lib/kubelet/pods/efef2015-cf60-49ff-b90d-f1120822806c/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.728021 4876 scope.go:117] "RemoveContainer" containerID="5d1b2d9f1bf51ee65493ed30ecd5e5bab31cd11e0866d010e9194c3abf215bce" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.728328 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7e97610-516d-4609-911c-53124ace7db0" path="/var/lib/kubelet/pods/f7e97610-516d-4609-911c-53124ace7db0/volumes" Dec 15 07:16:36 crc kubenswrapper[4876]: I1215 07:16:36.765561 4876 scope.go:117] "RemoveContainer" containerID="cd961a36fd04b1c02785e7dd50ada2e1ee1120caf0386cf15d358851e661646b" Dec 15 07:16:38 crc kubenswrapper[4876]: I1215 07:16:38.194971 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-57dd59bc-td2ns" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.148:9696/\": dial tcp 10.217.0.148:9696: connect: connection refused" Dec 15 
07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.566969 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.567042 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:46.567028055 +0000 UTC m=+1532.138170966 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.769934 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.770012 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:16:46.769992983 +0000 UTC m=+1532.341135914 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.973009 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.973085 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:46.973067734 +0000 UTC m=+1532.544210645 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.973679 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:38 crc kubenswrapper[4876]: E1215 07:16:38.973719 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:16:46.973708381 +0000 UTC m=+1532.544851292 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.048759 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.049506 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.055141 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.055563 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.055613 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.058119 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.060293 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:41 crc kubenswrapper[4876]: E1215 07:16:41.060436 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:42 crc kubenswrapper[4876]: I1215 07:16:42.708715 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:16:42 crc kubenswrapper[4876]: E1215 07:16:42.709715 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.062881 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.064482 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.064885 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.064876 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.064928 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.066504 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 
07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.067764 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.067802 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.592617 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.592711 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:02.59268886 +0000 UTC m=+1548.163831801 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.796547 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:46 crc kubenswrapper[4876]: E1215 07:16:46.796609 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:17:02.796595644 +0000 UTC m=+1548.367738555 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:16:47 crc kubenswrapper[4876]: E1215 07:16:47.002142 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:47 crc kubenswrapper[4876]: E1215 07:16:47.002253 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:03.002229984 +0000 UTC m=+1548.573372905 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:16:47 crc kubenswrapper[4876]: E1215 07:16:47.002745 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:16:47 crc kubenswrapper[4876]: E1215 07:16:47.002788 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:03.002777928 +0000 UTC m=+1548.573920859 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:16:48 crc kubenswrapper[4876]: I1215 07:16:48.994144 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.147897 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.147944 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.147968 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjqqj\" (UniqueName: \"kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.148038 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.148070 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.148086 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.148185 4876 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs\") pod \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\" (UID: \"1e9b5486-d793-41dc-b5f1-1f2085d7db79\") " Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.158315 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj" (OuterVolumeSpecName: "kube-api-access-jjqqj") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "kube-api-access-jjqqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.158347 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.198140 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.206521 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.206979 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config" (OuterVolumeSpecName: "config") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.207635 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.223539 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "1e9b5486-d793-41dc-b5f1-1f2085d7db79" (UID: "1e9b5486-d793-41dc-b5f1-1f2085d7db79"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249484 4876 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249522 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249535 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjqqj\" (UniqueName: \"kubernetes.io/projected/1e9b5486-d793-41dc-b5f1-1f2085d7db79-kube-api-access-jjqqj\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249547 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249554 4876 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249562 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-config\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.249573 4876 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9b5486-d793-41dc-b5f1-1f2085d7db79-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.509674 4876 generic.go:334] "Generic (PLEG): container finished" podID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerID="281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6" exitCode=0 Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.509711 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerDied","Data":"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6"} Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.509737 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-57dd59bc-td2ns" event={"ID":"1e9b5486-d793-41dc-b5f1-1f2085d7db79","Type":"ContainerDied","Data":"5cdd3742f3dafd4d1b2686f6ba04e093fd1505fccab185b64c0342c8f4cf64aa"} Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.509792 4876 scope.go:117] "RemoveContainer" containerID="9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.509798 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-57dd59bc-td2ns" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.533842 4876 scope.go:117] "RemoveContainer" containerID="281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.547661 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.553271 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-57dd59bc-td2ns"] Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.561948 4876 scope.go:117] "RemoveContainer" containerID="9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6" Dec 15 07:16:49 crc kubenswrapper[4876]: E1215 07:16:49.562533 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6\": container with ID starting with 9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6 not found: ID does not exist" containerID="9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.562571 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6"} err="failed to get container status \"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6\": rpc error: code = NotFound desc = could not find container \"9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6\": container with ID starting with 9b1285b77e209a92050e57b6e8f481fcc4b5d5beaf86fdf23bdfa4d0b964aee6 not found: ID does not exist" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.562597 4876 scope.go:117] "RemoveContainer" containerID="281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6" Dec 15 07:16:49 crc kubenswrapper[4876]: E1215 07:16:49.562938 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6\": container with ID starting with 281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6 not found: ID does not exist" containerID="281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6" Dec 15 07:16:49 crc kubenswrapper[4876]: I1215 07:16:49.562961 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6"} err="failed to get container status \"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6\": rpc error: code = NotFound desc = could not find container \"281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6\": container with ID starting with 281ca0ad8e2ff1d8250884c0cae460d35bf50df15d3059342d38e4b4d94aabc6 not found: ID does not exist" Dec 15 07:16:50 crc kubenswrapper[4876]: I1215 07:16:50.712745 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" path="/var/lib/kubelet/pods/1e9b5486-d793-41dc-b5f1-1f2085d7db79/volumes" Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.048196 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.048628 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.048847 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.049582 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.049619 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.050332 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.051676 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:51 crc kubenswrapper[4876]: E1215 07:16:51.051708 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.047649 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 is running failed: container 
process not found" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.048624 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049141 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 is running failed: container process not found" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049254 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049568 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 is running failed: container process not found" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049605 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049603 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.049646 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-gbp2z" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.200954 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-gbp2z_b2f6914b-2d26-4417-a7c0-21eaf29a18bf/ovs-vswitchd/0.log" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.202194 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.253823 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vwks\" (UniqueName: \"kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.253948 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254027 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254062 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254161 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log" (OuterVolumeSpecName: "var-log") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254205 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254186 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254161 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib" (OuterVolumeSpecName: "var-lib") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "var-lib". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.254426 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run\") pod \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\" (UID: \"b2f6914b-2d26-4417-a7c0-21eaf29a18bf\") " Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.255353 4876 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-lib\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.255383 4876 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-log\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.255396 4876 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.255431 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run" (OuterVolumeSpecName: "var-run") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.255534 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts" (OuterVolumeSpecName: "scripts") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.269134 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks" (OuterVolumeSpecName: "kube-api-access-7vwks") pod "b2f6914b-2d26-4417-a7c0-21eaf29a18bf" (UID: "b2f6914b-2d26-4417-a7c0-21eaf29a18bf"). InnerVolumeSpecName "kube-api-access-7vwks". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.357263 4876 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-var-run\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.357314 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vwks\" (UniqueName: \"kubernetes.io/projected/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-kube-api-access-7vwks\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.357335 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2f6914b-2d26-4417-a7c0-21eaf29a18bf-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.586261 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-gbp2z_b2f6914b-2d26-4417-a7c0-21eaf29a18bf/ovs-vswitchd/0.log" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.587923 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" exitCode=137 Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.587973 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerDied","Data":"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093"} Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.588063 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-gbp2z" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.588611 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-gbp2z" event={"ID":"b2f6914b-2d26-4417-a7c0-21eaf29a18bf","Type":"ContainerDied","Data":"26e406bd663fd61e4936f5eddbf56ff2545cfa4800c5f7935b314d6a4a0f68ca"} Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.588837 4876 scope.go:117] "RemoveContainer" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.707168 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.707579 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.725396 4876 scope.go:117] "RemoveContainer" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.733151 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.740707 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-gbp2z"] Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.761059 4876 
scope.go:117] "RemoveContainer" containerID="fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.785970 4876 scope.go:117] "RemoveContainer" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.786411 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093\": container with ID starting with d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 not found: ID does not exist" containerID="d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.786450 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093"} err="failed to get container status \"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093\": rpc error: code = NotFound desc = could not find container \"d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093\": container with ID starting with d7caaa2c3a0141c683c26ac66913eafa1892e5ccbde3737535cffad4c35b6093 not found: ID does not exist" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.786482 4876 scope.go:117] "RemoveContainer" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.787536 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a\": container with ID starting with 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a not found: ID does not exist" containerID="63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.787570 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a"} err="failed to get container status \"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a\": rpc error: code = NotFound desc = could not find container \"63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a\": container with ID starting with 63970b2f09450dc58b5b602236f1bc9c1dc0de69980802a0ae45521f3db28d8a not found: ID does not exist" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.787593 4876 scope.go:117] "RemoveContainer" containerID="fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649" Dec 15 07:16:56 crc kubenswrapper[4876]: E1215 07:16:56.787833 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649\": container with ID starting with fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649 not found: ID does not exist" containerID="fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649" Dec 15 07:16:56 crc kubenswrapper[4876]: I1215 07:16:56.787861 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649"} err="failed to get container status 
\"fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649\": rpc error: code = NotFound desc = could not find container \"fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649\": container with ID starting with fc3cad55b206109d90c7d4d257a931b42d2c6fc2e7f9c3319d6597f127586649 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.113479 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.169410 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq5zf\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf\") pod \"d91d3fea-2b02-48ad-b238-7a815dd36d22\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.169515 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock\") pod \"d91d3fea-2b02-48ad-b238-7a815dd36d22\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.169593 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") pod \"d91d3fea-2b02-48ad-b238-7a815dd36d22\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.169625 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache\") pod \"d91d3fea-2b02-48ad-b238-7a815dd36d22\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.169644 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"d91d3fea-2b02-48ad-b238-7a815dd36d22\" (UID: \"d91d3fea-2b02-48ad-b238-7a815dd36d22\") " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.170086 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock" (OuterVolumeSpecName: "lock") pod "d91d3fea-2b02-48ad-b238-7a815dd36d22" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.170526 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache" (OuterVolumeSpecName: "cache") pod "d91d3fea-2b02-48ad-b238-7a815dd36d22" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.173922 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "d91d3fea-2b02-48ad-b238-7a815dd36d22" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.174202 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d91d3fea-2b02-48ad-b238-7a815dd36d22" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.174280 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf" (OuterVolumeSpecName: "kube-api-access-fq5zf") pod "d91d3fea-2b02-48ad-b238-7a815dd36d22" (UID: "d91d3fea-2b02-48ad-b238-7a815dd36d22"). InnerVolumeSpecName "kube-api-access-fq5zf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.271155 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq5zf\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-kube-api-access-fq5zf\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.271189 4876 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-lock\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.271200 4876 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d91d3fea-2b02-48ad-b238-7a815dd36d22-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.271209 4876 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d91d3fea-2b02-48ad-b238-7a815dd36d22-cache\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.271237 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.284292 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.372464 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.602692 4876 generic.go:334] "Generic (PLEG): container finished" podID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerID="38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e" exitCode=137 Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.602756 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e"} Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.602785 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"d91d3fea-2b02-48ad-b238-7a815dd36d22","Type":"ContainerDied","Data":"fde5a4e492ac9fef9a8cd82a7e603d6e2880e94f051566d04045b93a28f7d7aa"} Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.602801 4876 scope.go:117] "RemoveContainer" containerID="38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.602927 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.633347 4876 scope.go:117] "RemoveContainer" containerID="5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.636527 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.653312 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.667828 4876 scope.go:117] "RemoveContainer" containerID="531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.691283 4876 scope.go:117] "RemoveContainer" containerID="3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.717506 4876 scope.go:117] "RemoveContainer" containerID="e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.744916 4876 scope.go:117] "RemoveContainer" containerID="6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.762831 4876 scope.go:117] "RemoveContainer" containerID="f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.780729 4876 scope.go:117] "RemoveContainer" containerID="6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.797487 4876 scope.go:117] "RemoveContainer" containerID="43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.813368 4876 scope.go:117] "RemoveContainer" containerID="ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.831845 4876 scope.go:117] "RemoveContainer" containerID="61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.849159 4876 scope.go:117] "RemoveContainer" containerID="a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.866141 4876 scope.go:117] "RemoveContainer" containerID="6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.890296 4876 scope.go:117] "RemoveContainer" containerID="b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.914504 4876 scope.go:117] "RemoveContainer" containerID="ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.937279 4876 scope.go:117] "RemoveContainer" containerID="38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.939157 
4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e\": container with ID starting with 38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e not found: ID does not exist" containerID="38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.939210 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e"} err="failed to get container status \"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e\": rpc error: code = NotFound desc = could not find container \"38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e\": container with ID starting with 38297dc82a760b9d054888f3643370d7e874df4a68c35498f62d72fc097c653e not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.939235 4876 scope.go:117] "RemoveContainer" containerID="5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.939583 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d\": container with ID starting with 5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d not found: ID does not exist" containerID="5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.939611 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d"} err="failed to get container status \"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d\": rpc error: code = NotFound desc = could not find container \"5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d\": container with ID starting with 5a2eb6507d3dec240013d663ca109c117aae008737b791cb5a94b007b76be21d not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.939628 4876 scope.go:117] "RemoveContainer" containerID="531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.940125 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81\": container with ID starting with 531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81 not found: ID does not exist" containerID="531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.940155 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81"} err="failed to get container status \"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81\": rpc error: code = NotFound desc = could not find container \"531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81\": container with ID starting with 531830b04dc558f8bca5d642d5282a6b5ffdf92398a6ab4a39803063c2e3ad81 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.940176 4876 scope.go:117] 
"RemoveContainer" containerID="3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.940620 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df\": container with ID starting with 3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df not found: ID does not exist" containerID="3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.940649 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df"} err="failed to get container status \"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df\": rpc error: code = NotFound desc = could not find container \"3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df\": container with ID starting with 3733875283f536781c79492d9f7c0c73eb0ae30217bbe4b1c65b9781692812df not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.940687 4876 scope.go:117] "RemoveContainer" containerID="e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.941127 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0\": container with ID starting with e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0 not found: ID does not exist" containerID="e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941152 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0"} err="failed to get container status \"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0\": rpc error: code = NotFound desc = could not find container \"e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0\": container with ID starting with e41dbb79242518af45aa59143cb7fd72f06571ad55e71b7acf07268431889fc0 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941167 4876 scope.go:117] "RemoveContainer" containerID="6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.941442 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328\": container with ID starting with 6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328 not found: ID does not exist" containerID="6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941462 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328"} err="failed to get container status \"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328\": rpc error: code = NotFound desc = could not find container \"6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328\": container with ID starting with 
6821e960b95540ace2d1816ace62466ec824072877dcbb9f857a29cb1290f328 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941478 4876 scope.go:117] "RemoveContainer" containerID="f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.941738 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b\": container with ID starting with f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b not found: ID does not exist" containerID="f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941781 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b"} err="failed to get container status \"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b\": rpc error: code = NotFound desc = could not find container \"f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b\": container with ID starting with f75e804142c56a1a8de65b67983677aac3ee76d0fd46db5748b853caddbeff9b not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.941798 4876 scope.go:117] "RemoveContainer" containerID="6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.942049 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646\": container with ID starting with 6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646 not found: ID does not exist" containerID="6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942069 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646"} err="failed to get container status \"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646\": rpc error: code = NotFound desc = could not find container \"6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646\": container with ID starting with 6c760bd61cb0c1951c7d701095b5fc8ec59cfa9edcac15f52558e4786f572646 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942080 4876 scope.go:117] "RemoveContainer" containerID="43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.942441 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d\": container with ID starting with 43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d not found: ID does not exist" containerID="43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942462 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d"} err="failed to get container status \"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d\": rpc 
error: code = NotFound desc = could not find container \"43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d\": container with ID starting with 43b0d349b4b047174912cefbfd716cc1f0bcbc7e87ba97930538be40757ede2d not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942480 4876 scope.go:117] "RemoveContainer" containerID="ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.942732 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9\": container with ID starting with ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9 not found: ID does not exist" containerID="ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942751 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9"} err="failed to get container status \"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9\": rpc error: code = NotFound desc = could not find container \"ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9\": container with ID starting with ca753790e69b28a72bc016541a4407db9dee9be7e9c9e61cfe6704a3cc5ef5f9 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942763 4876 scope.go:117] "RemoveContainer" containerID="61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.942965 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0\": container with ID starting with 61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0 not found: ID does not exist" containerID="61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942983 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0"} err="failed to get container status \"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0\": rpc error: code = NotFound desc = could not find container \"61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0\": container with ID starting with 61cca6179255da5cae2a55dd80d17b436a0d6abee82da9d847db5542446e41f0 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.942995 4876 scope.go:117] "RemoveContainer" containerID="a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.943248 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e\": container with ID starting with a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e not found: ID does not exist" containerID="a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943267 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e"} err="failed to get container status \"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e\": rpc error: code = NotFound desc = could not find container \"a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e\": container with ID starting with a02e287310acf7f09c64d5b6e77f8f0f9d181fd44cd53d2247af89e6d03fc31e not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943280 4876 scope.go:117] "RemoveContainer" containerID="6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.943550 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9\": container with ID starting with 6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9 not found: ID does not exist" containerID="6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943567 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9"} err="failed to get container status \"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9\": rpc error: code = NotFound desc = could not find container \"6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9\": container with ID starting with 6008a7cbce4557cb133d46389c8eb0d58101b41279d4a35f99ea34720d19f1f9 not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943578 4876 scope.go:117] "RemoveContainer" containerID="b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.943844 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c\": container with ID starting with b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c not found: ID does not exist" containerID="b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943862 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c"} err="failed to get container status \"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c\": rpc error: code = NotFound desc = could not find container \"b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c\": container with ID starting with b08deeec6ede60bdc4ecd135d3c5ffd2102e1ff4ade13f96676fbb21b83fbc7c not found: ID does not exist" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.943877 4876 scope.go:117] "RemoveContainer" containerID="ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9" Dec 15 07:16:57 crc kubenswrapper[4876]: E1215 07:16:57.944192 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9\": container with ID starting with ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9 not found: ID does not exist" 
containerID="ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9" Dec 15 07:16:57 crc kubenswrapper[4876]: I1215 07:16:57.944211 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9"} err="failed to get container status \"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9\": rpc error: code = NotFound desc = could not find container \"ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9\": container with ID starting with ab838bbc41032f03cf24defc1924aac656b38030cdf199d49b116c9dedb0a7a9 not found: ID does not exist" Dec 15 07:16:58 crc kubenswrapper[4876]: I1215 07:16:58.720590 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" path="/var/lib/kubelet/pods/b2f6914b-2d26-4417-a7c0-21eaf29a18bf/volumes" Dec 15 07:16:58 crc kubenswrapper[4876]: I1215 07:16:58.721884 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" path="/var/lib/kubelet/pods/d91d3fea-2b02-48ad-b238-7a815dd36d22/volumes" Dec 15 07:17:01 crc kubenswrapper[4876]: I1215 07:17:01.192998 4876 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod769bb0f8-96fe-485a-adfd-e51747bbff86"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod769bb0f8-96fe-485a-adfd-e51747bbff86] : Timed out while waiting for systemd to remove kubepods-besteffort-pod769bb0f8_96fe_485a_adfd_e51747bbff86.slice" Dec 15 07:17:02 crc kubenswrapper[4876]: E1215 07:17:02.659740 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:17:02 crc kubenswrapper[4876]: E1215 07:17:02.660357 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts podName:d5c5b73f-18fa-4bef-b443-15328769a818 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:34.660342441 +0000 UTC m=+1580.231485352 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts") pod "neutrona915-account-delete-gswz2" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818") : configmap "openstack-scripts" not found Dec 15 07:17:02 crc kubenswrapper[4876]: E1215 07:17:02.862828 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:17:02 crc kubenswrapper[4876]: E1215 07:17:02.863175 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts podName:478129d4-b31d-47e4-a6d4-b4aa7416356e nodeName:}" failed. No retries permitted until 2025-12-15 07:17:34.863158415 +0000 UTC m=+1580.434301326 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts") pod "novaapic196-account-delete-j7dqh" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e") : configmap "openstack-scripts" not found Dec 15 07:17:03 crc kubenswrapper[4876]: E1215 07:17:03.065712 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:17:03 crc kubenswrapper[4876]: E1215 07:17:03.066052 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts podName:bd3ee522-830b-4cde-ad14-15bcc28b7d06 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:35.066024901 +0000 UTC m=+1580.637167852 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts") pod "novacell0d7a8-account-delete-265rv" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06") : configmap "openstack-scripts" not found Dec 15 07:17:03 crc kubenswrapper[4876]: E1215 07:17:03.065788 4876 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 15 07:17:03 crc kubenswrapper[4876]: E1215 07:17:03.066430 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts podName:1501e554-af82-4179-959b-475a30b910d5 nodeName:}" failed. No retries permitted until 2025-12-15 07:17:35.066410051 +0000 UTC m=+1580.637553002 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts") pod "barbican2a14-account-delete-mbwx7" (UID: "1501e554-af82-4179-959b-475a30b910d5") : configmap "openstack-scripts" not found Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.443894 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.515051 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts\") pod \"1501e554-af82-4179-959b-475a30b910d5\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.515160 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvd7q\" (UniqueName: \"kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q\") pod \"1501e554-af82-4179-959b-475a30b910d5\" (UID: \"1501e554-af82-4179-959b-475a30b910d5\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.516994 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1501e554-af82-4179-959b-475a30b910d5" (UID: "1501e554-af82-4179-959b-475a30b910d5"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.530832 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q" (OuterVolumeSpecName: "kube-api-access-cvd7q") pod "1501e554-af82-4179-959b-475a30b910d5" (UID: "1501e554-af82-4179-959b-475a30b910d5"). InnerVolumeSpecName "kube-api-access-cvd7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.616749 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1501e554-af82-4179-959b-475a30b910d5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.616785 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvd7q\" (UniqueName: \"kubernetes.io/projected/1501e554-af82-4179-959b-475a30b910d5-kube-api-access-cvd7q\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.646364 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.651008 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.717804 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts\") pod \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.717916 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twr74\" (UniqueName: \"kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74\") pod \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\" (UID: \"bd3ee522-830b-4cde-ad14-15bcc28b7d06\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.717955 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts\") pod \"478129d4-b31d-47e4-a6d4-b4aa7416356e\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.717983 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5k8w\" (UniqueName: \"kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w\") pod \"478129d4-b31d-47e4-a6d4-b4aa7416356e\" (UID: \"478129d4-b31d-47e4-a6d4-b4aa7416356e\") " Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.720869 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bd3ee522-830b-4cde-ad14-15bcc28b7d06" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.721293 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w" (OuterVolumeSpecName: "kube-api-access-x5k8w") pod "478129d4-b31d-47e4-a6d4-b4aa7416356e" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e"). InnerVolumeSpecName "kube-api-access-x5k8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.721440 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "478129d4-b31d-47e4-a6d4-b4aa7416356e" (UID: "478129d4-b31d-47e4-a6d4-b4aa7416356e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.722745 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74" (OuterVolumeSpecName: "kube-api-access-twr74") pod "bd3ee522-830b-4cde-ad14-15bcc28b7d06" (UID: "bd3ee522-830b-4cde-ad14-15bcc28b7d06"). InnerVolumeSpecName "kube-api-access-twr74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.724830 4876 generic.go:334] "Generic (PLEG): container finished" podID="478129d4-b31d-47e4-a6d4-b4aa7416356e" containerID="a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4" exitCode=137 Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.724993 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapic196-account-delete-j7dqh" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.725070 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic196-account-delete-j7dqh" event={"ID":"478129d4-b31d-47e4-a6d4-b4aa7416356e","Type":"ContainerDied","Data":"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.725255 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapic196-account-delete-j7dqh" event={"ID":"478129d4-b31d-47e4-a6d4-b4aa7416356e","Type":"ContainerDied","Data":"6a0be5594bf1164434d9a16c353a1598f9b8d028647a4bfad9be45335e2b960c"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.725280 4876 scope.go:117] "RemoveContainer" containerID="a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.780463 4876 generic.go:334] "Generic (PLEG): container finished" podID="1501e554-af82-4179-959b-475a30b910d5" containerID="9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2" exitCode=137 Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.780588 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2a14-account-delete-mbwx7" event={"ID":"1501e554-af82-4179-959b-475a30b910d5","Type":"ContainerDied","Data":"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.780624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican2a14-account-delete-mbwx7" event={"ID":"1501e554-af82-4179-959b-475a30b910d5","Type":"ContainerDied","Data":"655465f7ba0c472bb7728b449b956869a7ac09d9d41976e75c800a71a4968690"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.780634 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican2a14-account-delete-mbwx7" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.825901 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twr74\" (UniqueName: \"kubernetes.io/projected/bd3ee522-830b-4cde-ad14-15bcc28b7d06-kube-api-access-twr74\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.825924 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478129d4-b31d-47e4-a6d4-b4aa7416356e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.825934 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5k8w\" (UniqueName: \"kubernetes.io/projected/478129d4-b31d-47e4-a6d4-b4aa7416356e-kube-api-access-x5k8w\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.825943 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bd3ee522-830b-4cde-ad14-15bcc28b7d06-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.837527 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.837609 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapic196-account-delete-j7dqh"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.852459 4876 scope.go:117] "RemoveContainer" containerID="a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4" Dec 15 07:17:05 crc kubenswrapper[4876]: E1215 07:17:05.855012 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4\": container with ID starting with a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4 not found: ID does not exist" containerID="a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.855049 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4"} err="failed to get container status \"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4\": rpc error: code = NotFound desc = could not find container \"a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4\": container with ID starting with a93860daadc9b4afc9c32191902c16b7be297956e90c1c61282de226aad5afd4 not found: ID does not exist" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.855070 4876 scope.go:117] "RemoveContainer" containerID="9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.855237 4876 generic.go:334] "Generic (PLEG): container finished" podID="d5c5b73f-18fa-4bef-b443-15328769a818" containerID="d0d09eace30f3a8203157172430444b7a9b0b3b017388393bd092a8a917b1184" exitCode=137 Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.855285 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona915-account-delete-gswz2" event={"ID":"d5c5b73f-18fa-4bef-b443-15328769a818","Type":"ContainerDied","Data":"d0d09eace30f3a8203157172430444b7a9b0b3b017388393bd092a8a917b1184"} Dec 15 07:17:05 crc kubenswrapper[4876]: 
I1215 07:17:05.889339 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.895635 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican2a14-account-delete-mbwx7"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.929882 4876 generic.go:334] "Generic (PLEG): container finished" podID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" containerID="4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d" exitCode=137 Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.929925 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0d7a8-account-delete-265rv" event={"ID":"bd3ee522-830b-4cde-ad14-15bcc28b7d06","Type":"ContainerDied","Data":"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.929951 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell0d7a8-account-delete-265rv" event={"ID":"bd3ee522-830b-4cde-ad14-15bcc28b7d06","Type":"ContainerDied","Data":"198fa3779f2d36503903806904e03b88ca096ae33da44949215b381f38d492eb"} Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.930007 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell0d7a8-account-delete-265rv" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.963170 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.965092 4876 scope.go:117] "RemoveContainer" containerID="9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2" Dec 15 07:17:05 crc kubenswrapper[4876]: E1215 07:17:05.966562 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2\": container with ID starting with 9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2 not found: ID does not exist" containerID="9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.966591 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2"} err="failed to get container status \"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2\": rpc error: code = NotFound desc = could not find container \"9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2\": container with ID starting with 9b6d97a5fe06b23baf4f4cc16e7f22f4f27c3c6688a016e60730a06189f575b2 not found: ID does not exist" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.966613 4876 scope.go:117] "RemoveContainer" containerID="4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.969338 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell0d7a8-account-delete-265rv"] Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.982558 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.989389 4876 scope.go:117] "RemoveContainer" containerID="4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d" Dec 15 07:17:05 crc kubenswrapper[4876]: E1215 07:17:05.989612 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d\": container with ID starting with 4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d not found: ID does not exist" containerID="4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d" Dec 15 07:17:05 crc kubenswrapper[4876]: I1215 07:17:05.989639 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d"} err="failed to get container status \"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d\": rpc error: code = NotFound desc = could not find container \"4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d\": container with ID starting with 4c1133b24c20f06fc70b41788f7704cc14065be63ab899b1ec7a1e099c9e225d not found: ID does not exist" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.132072 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts\") pod \"d5c5b73f-18fa-4bef-b443-15328769a818\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.132181 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb254\" (UniqueName: \"kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254\") pod \"d5c5b73f-18fa-4bef-b443-15328769a818\" (UID: \"d5c5b73f-18fa-4bef-b443-15328769a818\") " Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.133410 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d5c5b73f-18fa-4bef-b443-15328769a818" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.136895 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254" (OuterVolumeSpecName: "kube-api-access-nb254") pod "d5c5b73f-18fa-4bef-b443-15328769a818" (UID: "d5c5b73f-18fa-4bef-b443-15328769a818"). InnerVolumeSpecName "kube-api-access-nb254". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.234193 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5c5b73f-18fa-4bef-b443-15328769a818-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.234264 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb254\" (UniqueName: \"kubernetes.io/projected/d5c5b73f-18fa-4bef-b443-15328769a818-kube-api-access-nb254\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.715919 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1501e554-af82-4179-959b-475a30b910d5" path="/var/lib/kubelet/pods/1501e554-af82-4179-959b-475a30b910d5/volumes" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.716924 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="478129d4-b31d-47e4-a6d4-b4aa7416356e" path="/var/lib/kubelet/pods/478129d4-b31d-47e4-a6d4-b4aa7416356e/volumes" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.717485 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" path="/var/lib/kubelet/pods/bd3ee522-830b-4cde-ad14-15bcc28b7d06/volumes" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.948879 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutrona915-account-delete-gswz2" event={"ID":"d5c5b73f-18fa-4bef-b443-15328769a818","Type":"ContainerDied","Data":"9be7ac597f2cf9e90dd302b2eeea88feed25d6c598eb222d04e5fb3791882102"} Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.949212 4876 scope.go:117] "RemoveContainer" containerID="d0d09eace30f3a8203157172430444b7a9b0b3b017388393bd092a8a917b1184" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.948899 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutrona915-account-delete-gswz2" Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.972100 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:17:06 crc kubenswrapper[4876]: I1215 07:17:06.977705 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutrona915-account-delete-gswz2"] Dec 15 07:17:08 crc kubenswrapper[4876]: I1215 07:17:08.718514 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5c5b73f-18fa-4bef-b443-15328769a818" path="/var/lib/kubelet/pods/d5c5b73f-18fa-4bef-b443-15328769a818/volumes" Dec 15 07:17:11 crc kubenswrapper[4876]: I1215 07:17:11.705915 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:17:11 crc kubenswrapper[4876]: E1215 07:17:11.706438 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.148340 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149285 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149305 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149322 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-reaper" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149330 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-reaper" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149343 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149352 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149367 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149375 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149387 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149394 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149403 4876 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149410 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149421 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149427 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149438 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149445 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149453 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149460 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149473 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149481 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149492 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149500 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149511 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149518 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149527 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149534 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149543 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149551 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" 
containerName="account-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149562 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149569 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149579 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-central-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149589 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-central-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149602 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149609 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149619 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149626 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149638 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149646 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-server" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149658 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" containerName="nova-cell0-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149665 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" containerName="nova-cell0-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149679 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149687 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149698 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149706 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149716 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478129d4-b31d-47e4-a6d4-b4aa7416356e" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149723 4876 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="478129d4-b31d-47e4-a6d4-b4aa7416356e" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149732 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149738 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149752 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149759 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149769 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149777 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149785 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="setup-container" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149792 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="setup-container" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149806 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149814 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149824 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="probe" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149832 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="probe" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149845 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="galera" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149852 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="galera" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149866 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149873 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149880 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-notification-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149888 4876 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-notification-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149901 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149908 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149921 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149928 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149940 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233c0ff5-7edd-41c4-8e16-7de48b9fc76c" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149948 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="233c0ff5-7edd-41c4-8e16-7de48b9fc76c" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149958 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149965 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149979 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1501e554-af82-4179-959b-475a30b910d5" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.149987 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1501e554-af82-4179-959b-475a30b910d5" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.149998 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150006 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-server" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150018 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150025 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150036 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" containerName="kube-state-metrics" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150043 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" containerName="kube-state-metrics" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150075 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="galera" Dec 15 07:17:20 crc 
kubenswrapper[4876]: I1215 07:17:20.150083 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="galera" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150096 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="rsync" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150118 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="rsync" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150132 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150139 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150151 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150159 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150171 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="swift-recon-cron" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150178 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="swift-recon-cron" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150190 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150198 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150208 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150215 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150228 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d724d787-7189-4c81-94bf-08c2904deaf9" containerName="keystone-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150236 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d724d787-7189-4c81-94bf-08c2904deaf9" containerName="keystone-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150248 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150255 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150270 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" 
containerName="container-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150278 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150287 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="sg-core" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150294 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="sg-core" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150303 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150310 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150321 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="ovsdbserver-sb" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150330 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="ovsdbserver-sb" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150339 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150346 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150358 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="ovsdbserver-nb" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150364 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="ovsdbserver-nb" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150373 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerName="memcached" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150380 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerName="memcached" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150391 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150398 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150407 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="setup-container" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150414 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="setup-container" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150426 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="mysql-bootstrap" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150446 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="mysql-bootstrap" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150459 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150466 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-server" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150477 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d24022bd-28ab-402d-9078-c52208891ef8" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150485 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d24022bd-28ab-402d-9078-c52208891ef8" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150495 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150502 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150513 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150522 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150531 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150538 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150550 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150558 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150570 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150578 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150588 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="init" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150595 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="init" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150605 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150613 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150623 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150630 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150643 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-expirer" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150650 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-expirer" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150658 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150666 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150678 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="cinder-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150686 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="cinder-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150698 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="mysql-bootstrap" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150706 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="mysql-bootstrap" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150911 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server-init" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150922 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server-init" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150934 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150941 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-server" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.150950 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150958 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 
07:17:20.150969 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="dnsmasq-dns" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.150977 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="dnsmasq-dns" Dec 15 07:17:20 crc kubenswrapper[4876]: E1215 07:17:20.151002 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5c5b73f-18fa-4bef-b443-15328769a818" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151010 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5c5b73f-18fa-4bef-b443-15328769a818" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151217 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="87fa47d9-4ce3-4233-baf3-a5b86ab26dd1" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151234 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151249 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a01ad7f-0924-4b3d-ba95-b5e599f343ee" containerName="nova-cell0-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151261 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151272 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-metadata" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151284 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151295 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="swift-recon-cron" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151308 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151317 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="df0aba13-046d-4950-bfa3-c873c535847f" containerName="ovn-controller" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151327 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="db3f4964-0cca-4527-93de-457292de4be7" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151336 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151348 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="23569e4d-9d69-4947-9293-50d1667c1eda" containerName="placement-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151356 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-central-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151364 4876 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovs-vswitchd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151375 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39f17917-e5e7-4c0b-bddb-d7ce2fbc71d1" containerName="kube-state-metrics" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151383 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="60c4c5c4-fe8f-45a7-8d0a-78976c3383a7" containerName="nova-scheduler-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151392 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151401 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1639e7c7-83c0-4a60-9f59-5b31772b9f35" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151411 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151421 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151435 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d724d787-7189-4c81-94bf-08c2904deaf9" containerName="keystone-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151450 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9b5486-d793-41dc-b5f1-1f2085d7db79" containerName="neutron-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151458 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3eb0262-ff07-4e0a-8e3b-9b147ccf7e71" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151466 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="233c0ff5-7edd-41c4-8e16-7de48b9fc76c" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151478 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d24022bd-28ab-402d-9078-c52208891ef8" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151487 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a1020a1-7afe-46ee-b5c4-40a9290a05e1" containerName="rabbitmq" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151499 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151510 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151517 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151525 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="ovsdbserver-sb" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151534 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api-log" 
Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151544 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2f6914b-2d26-4417-a7c0-21eaf29a18bf" containerName="ovsdb-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151552 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="478129d4-b31d-47e4-a6d4-b4aa7416356e" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151564 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151576 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151588 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-auditor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151598 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151606 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151616 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="cinder-scheduler" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151626 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd3ee522-830b-4cde-ad14-15bcc28b7d06" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151634 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-expirer" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151642 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-httpd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151650 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="sg-core" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151658 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151670 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fc3485b-9f78-40d0-b864-b40626fdba7c" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151678 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="db2340b9-e33b-432a-a1e6-df022337da1c" containerName="dnsmasq-dns" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151689 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="83083084-55fd-4e95-87bf-bebcc2d41fb8" containerName="nova-metadata-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151701 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="189a34a7-d451-4c26-b84e-5b056fe1e93b" containerName="ovsdbserver-nb" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151708 4876 
memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="rsync" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151720 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="account-reaper" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151732 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="openstack-network-exporter" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151742 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7e97610-516d-4609-911c-53124ace7db0" containerName="barbican-keystone-listener" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151756 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc226c84-f3f6-47dc-ba09-3a79dd00e5d0" containerName="nova-api-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151764 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1501e554-af82-4179-959b-475a30b910d5" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151775 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="769bb0f8-96fe-485a-adfd-e51747bbff86" containerName="proxy-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151787 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbe2efa0-0b59-42cb-93d7-3540f4b03613" containerName="ovn-northd" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151797 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b75337-8832-415e-a91f-2f8edd407cf1" containerName="probe" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151807 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5c5b73f-18fa-4bef-b443-15328769a818" containerName="mariadb-account-delete" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151819 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151831 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcf47e8b-49d3-45cb-a496-b3a5a256cc5c" containerName="cinder-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151842 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3aa1f78c-0be9-4a9c-b4fd-0d7ffe80b774" containerName="barbican-api" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151851 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-replicator" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151858 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd0403aa-07db-43b1-8df6-6317130cbd53" containerName="memcached" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151866 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="efef2015-cf60-49ff-b90d-f1120822806c" containerName="ceilometer-notification-agent" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151876 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f937a46-5b8e-4d1c-bd05-3b729ffb8188" containerName="glance-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151885 4876 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="object-server" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151896 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1503039d-0445-46ac-81ca-5af528a46ce2" containerName="galera" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151906 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b7ba44e-d87f-4b10-a601-eb425af47a70" containerName="barbican-worker-log" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151915 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ecca0e4-3e21-43a7-90ce-9235fb7e46a2" containerName="nova-cell1-conductor-conductor" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151925 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a65a162-9f80-4d1c-be2c-001dedcb5391" containerName="galera" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151934 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91d3fea-2b02-48ad-b238-7a815dd36d22" containerName="container-updater" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.151945 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba4acd07-a5cb-47a7-9f1f-0bc818d3f738" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.153081 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.188302 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.230228 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.230411 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.230457 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5bdk\" (UniqueName: \"kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.332411 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.332477 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.332496 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5bdk\" (UniqueName: \"kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.332884 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.333070 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.353732 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5bdk\" (UniqueName: \"kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk\") pod \"certified-operators-zmht9\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:20 crc kubenswrapper[4876]: I1215 07:17:20.491932 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:21 crc kubenswrapper[4876]: I1215 07:17:21.003394 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:21 crc kubenswrapper[4876]: I1215 07:17:21.085054 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerStarted","Data":"ce464ebf965b66278f99b4a6d364ab6eb5c921e616a3157755db31447e8216de"} Dec 15 07:17:22 crc kubenswrapper[4876]: I1215 07:17:22.095601 4876 generic.go:334] "Generic (PLEG): container finished" podID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerID="484fae8d2aea949609e19c8b5566c15c9fd82b22857008cac4155f3a9a714e54" exitCode=0 Dec 15 07:17:22 crc kubenswrapper[4876]: I1215 07:17:22.095655 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerDied","Data":"484fae8d2aea949609e19c8b5566c15c9fd82b22857008cac4155f3a9a714e54"} Dec 15 07:17:23 crc kubenswrapper[4876]: I1215 07:17:23.104989 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerStarted","Data":"b38e7aaec3a08e3c0deff2fdecccf3d44c33db80a30293d2f2878e9cb3c29ecb"} Dec 15 07:17:23 crc kubenswrapper[4876]: E1215 07:17:23.281537 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod11f85656_55aa_489a_b534_9b4bc6b6c59a.slice/crio-b38e7aaec3a08e3c0deff2fdecccf3d44c33db80a30293d2f2878e9cb3c29ecb.scope\": RecentStats: unable to find data in memory cache]" Dec 15 07:17:24 crc kubenswrapper[4876]: I1215 07:17:24.115414 4876 generic.go:334] "Generic (PLEG): container finished" podID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerID="b38e7aaec3a08e3c0deff2fdecccf3d44c33db80a30293d2f2878e9cb3c29ecb" exitCode=0 Dec 15 07:17:24 crc kubenswrapper[4876]: I1215 07:17:24.115476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerDied","Data":"b38e7aaec3a08e3c0deff2fdecccf3d44c33db80a30293d2f2878e9cb3c29ecb"} Dec 15 07:17:25 crc kubenswrapper[4876]: I1215 07:17:25.126508 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerStarted","Data":"38803a957e59f4372fd96b6de3a2061d5e685af4f5c6fad80768b62b5bc25d20"} Dec 15 07:17:25 crc kubenswrapper[4876]: I1215 07:17:25.148135 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zmht9" podStartSLOduration=2.718183264 podStartE2EDuration="5.148117894s" podCreationTimestamp="2025-12-15 07:17:20 +0000 UTC" firstStartedPulling="2025-12-15 07:17:22.097992851 +0000 UTC m=+1567.669135802" lastFinishedPulling="2025-12-15 07:17:24.527927501 +0000 UTC m=+1570.099070432" observedRunningTime="2025-12-15 07:17:25.145055454 +0000 UTC m=+1570.716198385" watchObservedRunningTime="2025-12-15 07:17:25.148117894 +0000 UTC m=+1570.719260825" Dec 15 07:17:25 crc kubenswrapper[4876]: I1215 07:17:25.706152 4876 scope.go:117] "RemoveContainer" 
containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:17:25 crc kubenswrapper[4876]: E1215 07:17:25.706330 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:17:30 crc kubenswrapper[4876]: I1215 07:17:30.496007 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:30 crc kubenswrapper[4876]: I1215 07:17:30.497597 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:30 crc kubenswrapper[4876]: I1215 07:17:30.555904 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:31 crc kubenswrapper[4876]: I1215 07:17:31.265563 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:31 crc kubenswrapper[4876]: I1215 07:17:31.316014 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:33 crc kubenswrapper[4876]: I1215 07:17:33.211955 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zmht9" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="registry-server" containerID="cri-o://38803a957e59f4372fd96b6de3a2061d5e685af4f5c6fad80768b62b5bc25d20" gracePeriod=2 Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.222877 4876 generic.go:334] "Generic (PLEG): container finished" podID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerID="38803a957e59f4372fd96b6de3a2061d5e685af4f5c6fad80768b62b5bc25d20" exitCode=0 Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.222942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerDied","Data":"38803a957e59f4372fd96b6de3a2061d5e685af4f5c6fad80768b62b5bc25d20"} Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.294445 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.442727 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content\") pod \"11f85656-55aa-489a-b534-9b4bc6b6c59a\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.442859 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5bdk\" (UniqueName: \"kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk\") pod \"11f85656-55aa-489a-b534-9b4bc6b6c59a\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.442881 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities\") pod \"11f85656-55aa-489a-b534-9b4bc6b6c59a\" (UID: \"11f85656-55aa-489a-b534-9b4bc6b6c59a\") " Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.443849 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities" (OuterVolumeSpecName: "utilities") pod "11f85656-55aa-489a-b534-9b4bc6b6c59a" (UID: "11f85656-55aa-489a-b534-9b4bc6b6c59a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.447610 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk" (OuterVolumeSpecName: "kube-api-access-t5bdk") pod "11f85656-55aa-489a-b534-9b4bc6b6c59a" (UID: "11f85656-55aa-489a-b534-9b4bc6b6c59a"). InnerVolumeSpecName "kube-api-access-t5bdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.511359 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11f85656-55aa-489a-b534-9b4bc6b6c59a" (UID: "11f85656-55aa-489a-b534-9b4bc6b6c59a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.544254 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5bdk\" (UniqueName: \"kubernetes.io/projected/11f85656-55aa-489a-b534-9b4bc6b6c59a-kube-api-access-t5bdk\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.544285 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:34 crc kubenswrapper[4876]: I1215 07:17:34.544295 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11f85656-55aa-489a-b534-9b4bc6b6c59a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.239405 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zmht9" event={"ID":"11f85656-55aa-489a-b534-9b4bc6b6c59a","Type":"ContainerDied","Data":"ce464ebf965b66278f99b4a6d364ab6eb5c921e616a3157755db31447e8216de"} Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.239468 4876 scope.go:117] "RemoveContainer" containerID="38803a957e59f4372fd96b6de3a2061d5e685af4f5c6fad80768b62b5bc25d20" Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.239504 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zmht9" Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.271454 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.278776 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zmht9"] Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.284534 4876 scope.go:117] "RemoveContainer" containerID="b38e7aaec3a08e3c0deff2fdecccf3d44c33db80a30293d2f2878e9cb3c29ecb" Dec 15 07:17:35 crc kubenswrapper[4876]: I1215 07:17:35.309931 4876 scope.go:117] "RemoveContainer" containerID="484fae8d2aea949609e19c8b5566c15c9fd82b22857008cac4155f3a9a714e54" Dec 15 07:17:36 crc kubenswrapper[4876]: I1215 07:17:36.713352 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" path="/var/lib/kubelet/pods/11f85656-55aa-489a-b534-9b4bc6b6c59a/volumes" Dec 15 07:17:38 crc kubenswrapper[4876]: I1215 07:17:38.706301 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:17:38 crc kubenswrapper[4876]: E1215 07:17:38.707258 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:17:51 crc kubenswrapper[4876]: I1215 07:17:51.705832 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:17:51 crc kubenswrapper[4876]: E1215 07:17:51.706901 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:18:04 crc kubenswrapper[4876]: I1215 07:18:04.713589 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:18:04 crc kubenswrapper[4876]: E1215 07:18:04.714400 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:18:17 crc kubenswrapper[4876]: I1215 07:18:17.706093 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:18:17 crc kubenswrapper[4876]: E1215 07:18:17.706888 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.205191 4876 scope.go:117] "RemoveContainer" containerID="b76d51a3e29dc17517e20451f413a12e5ed7e5c3573e1a5219731b5743af46c5" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.253364 4876 scope.go:117] "RemoveContainer" containerID="84311730edcfcde730c08a397de2aee0f27365e627e2d75a586ebc62a3484c83" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.279597 4876 scope.go:117] "RemoveContainer" containerID="847f28629996158a7717ee6e842c76acbbe38b89059a464bbb28e36869ac3b2c" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.303719 4876 scope.go:117] "RemoveContainer" containerID="ed46ba082042a7f7e73acc2c7c48ce907308018ef2b2dcd18542a3b290900f04" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.331729 4876 scope.go:117] "RemoveContainer" containerID="69805d5a06cd5524fafd7005c790931dcf3f61348fabbc5ef0cc66ad4bdaf487" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.353829 4876 scope.go:117] "RemoveContainer" containerID="80cabd3fa89c350ac8db85619ee6a53219f9642134cc11ad54167b0ebdc42f33" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.372256 4876 scope.go:117] "RemoveContainer" containerID="c53e04d05e35f7e0da2f7a946c4159eb6b6ceba5eb6aed3cceb8823b3da1ebec" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.389340 4876 scope.go:117] "RemoveContainer" containerID="e5798be0fbda64fc3442d95a2f8d6fa9b84215a52bd5a2e3554dda263af70103" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.412433 4876 scope.go:117] "RemoveContainer" containerID="2177209f937789e4127197c5da2a786211bdf9305cf1b537e47d27d8277e5721" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.433704 4876 scope.go:117] "RemoveContainer" containerID="f844d2a20631c7f7c30aa53e2872ca1d23bbe5a8cc378b87c0c90ee03a7ea3ef" Dec 15 07:18:19 crc kubenswrapper[4876]: I1215 07:18:19.453469 4876 scope.go:117] "RemoveContainer" 
containerID="05df66cfb1e6136f09e5899bf3239d9ae901f441c38d4c1dde084356b7683117" Dec 15 07:18:32 crc kubenswrapper[4876]: I1215 07:18:32.705298 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:18:32 crc kubenswrapper[4876]: E1215 07:18:32.706459 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:18:46 crc kubenswrapper[4876]: I1215 07:18:46.706531 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:18:46 crc kubenswrapper[4876]: E1215 07:18:46.707861 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:19:01 crc kubenswrapper[4876]: I1215 07:19:01.706335 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:19:01 crc kubenswrapper[4876]: E1215 07:19:01.707481 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:19:12 crc kubenswrapper[4876]: I1215 07:19:12.705669 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:19:12 crc kubenswrapper[4876]: E1215 07:19:12.706892 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.618444 4876 scope.go:117] "RemoveContainer" containerID="5bdafabe22fc8d6baa7ff74db01d85c74e18a8cf0bcd1646fd4db652340f7a95" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.665315 4876 scope.go:117] "RemoveContainer" containerID="c8fe465ead0f1cef4b6668b9d56683285c033b94a8c0ba05de43046e72809fe2" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.715695 4876 scope.go:117] "RemoveContainer" containerID="2c2e940445d6dda54c77baa30e35731e8e3964b78fb838b5e59b11f0b5cb276a" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.732962 4876 scope.go:117] "RemoveContainer" containerID="49d1be0197bd7019b41f95b753ef8f25c732d0358cd8ff9b1cadcccd7de28623" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.778772 4876 
scope.go:117] "RemoveContainer" containerID="a9f4a3c24d47c88f332ff32aad0cf2a4aada60f557ecd665944aa01164bc7368" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.799302 4876 scope.go:117] "RemoveContainer" containerID="1beed7fcbdaa351311ae03b42d33cb35cabddbf9032f3b4575067ede39025164" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.826072 4876 scope.go:117] "RemoveContainer" containerID="c8e4c3af47b449c9ddbf86cea9464d5a787639dab63bdd9badeca8df1d278810" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.846988 4876 scope.go:117] "RemoveContainer" containerID="a0a8d5ff29d8c7fb66681edfebb386b1c81d12a16bcc7db3d27a1bac7237e1fd" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.875059 4876 scope.go:117] "RemoveContainer" containerID="7c14998c06187633b99f45ab4b9d37c286bd1a3e2dda2a643a5a6f4a0e1a4dbf" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.897630 4876 scope.go:117] "RemoveContainer" containerID="facab2a461fe55e9056a81da264f9887d011082603b2cc7deb19ddec7d4a94ea" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.914962 4876 scope.go:117] "RemoveContainer" containerID="8b931409e9892662e3816fe38ace9a8974bc0af1f3224b7c5f8239fed95708bc" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.937638 4876 scope.go:117] "RemoveContainer" containerID="0eb1deab71a70bcb348ef12bc6274d54e1904bd98469f82095989401793f7b45" Dec 15 07:19:19 crc kubenswrapper[4876]: I1215 07:19:19.958612 4876 scope.go:117] "RemoveContainer" containerID="0f5d9ca0c1bc15780b7a7bc437cc9333e10e8e53a44d9af71fffcef078b3c8e9" Dec 15 07:19:27 crc kubenswrapper[4876]: I1215 07:19:27.705150 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:19:27 crc kubenswrapper[4876]: E1215 07:19:27.706018 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:19:41 crc kubenswrapper[4876]: I1215 07:19:41.705942 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:19:41 crc kubenswrapper[4876]: E1215 07:19:41.707024 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:19:53 crc kubenswrapper[4876]: I1215 07:19:53.705613 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:19:53 crc kubenswrapper[4876]: E1215 07:19:53.706419 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:08 crc kubenswrapper[4876]: I1215 07:20:08.705429 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:20:08 crc kubenswrapper[4876]: E1215 07:20:08.706451 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.158209 4876 scope.go:117] "RemoveContainer" containerID="e1b05578ca205ace99ce2b9f59076a765fe7abebc45a03c8c6cad9fd601c4197" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.185155 4876 scope.go:117] "RemoveContainer" containerID="afbe396af5b5d28cb16805ba0df6ca3ec32ddb8528a8b72ae216c9953653f09c" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.216949 4876 scope.go:117] "RemoveContainer" containerID="b3bd0b7823735f04ec2747afdac419e40693e5f817e9f14ee5782df58a16fb64" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.247440 4876 scope.go:117] "RemoveContainer" containerID="c5bf69c338d2955649defd7611ec504382e5c3542edee5ca3f56d01332885ae6" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.273502 4876 scope.go:117] "RemoveContainer" containerID="cc294b31b47b3b2be756a6a68ef1afe2c1758bb4597cea8ea401001afc30deaa" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.295905 4876 scope.go:117] "RemoveContainer" containerID="6649c600f8a029cdee4c0de05db35b7d2ab1a9c6655e3285739700ef5c19c813" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.338027 4876 scope.go:117] "RemoveContainer" containerID="6ef07e3c5b93e563fee5ab9c6f22518b4efbc5830572701e2b5b1257e3857c7d" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.355575 4876 scope.go:117] "RemoveContainer" containerID="64ebd7f515630c3d30ddbca962f75c5a3dc31b0c6d22a8af88bed0a2533b325f" Dec 15 07:20:20 crc kubenswrapper[4876]: I1215 07:20:20.385258 4876 scope.go:117] "RemoveContainer" containerID="6687e14dbc12d2a78d968537c35d1dd9ec16adfd9f027ee295ff8680066dcf82" Dec 15 07:20:22 crc kubenswrapper[4876]: I1215 07:20:22.706947 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:20:22 crc kubenswrapper[4876]: E1215 07:20:22.707838 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:34 crc kubenswrapper[4876]: I1215 07:20:34.716488 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:20:34 crc kubenswrapper[4876]: E1215 07:20:34.717222 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:46 crc kubenswrapper[4876]: I1215 07:20:46.705285 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:20:46 crc kubenswrapper[4876]: E1215 07:20:46.706028 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.495214 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:20:57 crc kubenswrapper[4876]: E1215 07:20:57.496072 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="extract-utilities" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.496093 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="extract-utilities" Dec 15 07:20:57 crc kubenswrapper[4876]: E1215 07:20:57.496145 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="registry-server" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.496158 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="registry-server" Dec 15 07:20:57 crc kubenswrapper[4876]: E1215 07:20:57.496174 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="extract-content" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.496186 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="extract-content" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.496373 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="11f85656-55aa-489a-b534-9b4bc6b6c59a" containerName="registry-server" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.497542 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.512998 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.600262 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfsj7\" (UniqueName: \"kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.600367 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.600389 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.701256 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.701304 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.701337 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfsj7\" (UniqueName: \"kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.701807 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.701914 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.721268 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jfsj7\" (UniqueName: \"kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7\") pod \"community-operators-kl5g7\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:57 crc kubenswrapper[4876]: I1215 07:20:57.817143 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:20:58 crc kubenswrapper[4876]: I1215 07:20:58.289442 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:20:58 crc kubenswrapper[4876]: I1215 07:20:58.705315 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:20:58 crc kubenswrapper[4876]: E1215 07:20:58.705855 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:20:59 crc kubenswrapper[4876]: I1215 07:20:59.050409 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerID="0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0" exitCode=0 Dec 15 07:20:59 crc kubenswrapper[4876]: I1215 07:20:59.050450 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerDied","Data":"0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0"} Dec 15 07:20:59 crc kubenswrapper[4876]: I1215 07:20:59.050475 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerStarted","Data":"ccd822fa0e621827966a4965c03ff15061f64b128659700c83832902c0c82685"} Dec 15 07:20:59 crc kubenswrapper[4876]: I1215 07:20:59.053611 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:21:00 crc kubenswrapper[4876]: I1215 07:21:00.061981 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerStarted","Data":"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a"} Dec 15 07:21:01 crc kubenswrapper[4876]: I1215 07:21:01.073749 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerID="d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a" exitCode=0 Dec 15 07:21:01 crc kubenswrapper[4876]: I1215 07:21:01.073819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerDied","Data":"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a"} Dec 15 07:21:02 crc kubenswrapper[4876]: I1215 07:21:02.086557 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" 
event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerStarted","Data":"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360"} Dec 15 07:21:02 crc kubenswrapper[4876]: I1215 07:21:02.121402 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kl5g7" podStartSLOduration=2.390575229 podStartE2EDuration="5.121377809s" podCreationTimestamp="2025-12-15 07:20:57 +0000 UTC" firstStartedPulling="2025-12-15 07:20:59.053408616 +0000 UTC m=+1784.624551527" lastFinishedPulling="2025-12-15 07:21:01.784211186 +0000 UTC m=+1787.355354107" observedRunningTime="2025-12-15 07:21:02.101704536 +0000 UTC m=+1787.672847467" watchObservedRunningTime="2025-12-15 07:21:02.121377809 +0000 UTC m=+1787.692520740" Dec 15 07:21:07 crc kubenswrapper[4876]: I1215 07:21:07.817864 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:07 crc kubenswrapper[4876]: I1215 07:21:07.818469 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:07 crc kubenswrapper[4876]: I1215 07:21:07.888426 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:08 crc kubenswrapper[4876]: I1215 07:21:08.192241 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:08 crc kubenswrapper[4876]: I1215 07:21:08.249831 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:21:10 crc kubenswrapper[4876]: I1215 07:21:10.152164 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kl5g7" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="registry-server" containerID="cri-o://85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360" gracePeriod=2 Dec 15 07:21:10 crc kubenswrapper[4876]: I1215 07:21:10.705957 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:21:10 crc kubenswrapper[4876]: E1215 07:21:10.706823 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.095707 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.163937 4876 generic.go:334] "Generic (PLEG): container finished" podID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerID="85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360" exitCode=0 Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.164183 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerDied","Data":"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360"} Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.164219 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kl5g7" event={"ID":"7dfd3544-72a2-42f2-91f3-3e2d581716ec","Type":"ContainerDied","Data":"ccd822fa0e621827966a4965c03ff15061f64b128659700c83832902c0c82685"} Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.164242 4876 scope.go:117] "RemoveContainer" containerID="85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.164269 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kl5g7" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.182204 4876 scope.go:117] "RemoveContainer" containerID="d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.203255 4876 scope.go:117] "RemoveContainer" containerID="0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.206922 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfsj7\" (UniqueName: \"kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7\") pod \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.206955 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content\") pod \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.206975 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities\") pod \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\" (UID: \"7dfd3544-72a2-42f2-91f3-3e2d581716ec\") " Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.208607 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities" (OuterVolumeSpecName: "utilities") pod "7dfd3544-72a2-42f2-91f3-3e2d581716ec" (UID: "7dfd3544-72a2-42f2-91f3-3e2d581716ec"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.214249 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7" (OuterVolumeSpecName: "kube-api-access-jfsj7") pod "7dfd3544-72a2-42f2-91f3-3e2d581716ec" (UID: "7dfd3544-72a2-42f2-91f3-3e2d581716ec"). InnerVolumeSpecName "kube-api-access-jfsj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.260481 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dfd3544-72a2-42f2-91f3-3e2d581716ec" (UID: "7dfd3544-72a2-42f2-91f3-3e2d581716ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.262056 4876 scope.go:117] "RemoveContainer" containerID="85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360" Dec 15 07:21:11 crc kubenswrapper[4876]: E1215 07:21:11.262539 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360\": container with ID starting with 85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360 not found: ID does not exist" containerID="85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.262582 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360"} err="failed to get container status \"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360\": rpc error: code = NotFound desc = could not find container \"85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360\": container with ID starting with 85ae4dc428ab37074a4f5f2c9e78bd8d78d7e953c4bab3e6fe3e96085a900360 not found: ID does not exist" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.262610 4876 scope.go:117] "RemoveContainer" containerID="d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a" Dec 15 07:21:11 crc kubenswrapper[4876]: E1215 07:21:11.262968 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a\": container with ID starting with d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a not found: ID does not exist" containerID="d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.263014 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a"} err="failed to get container status \"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a\": rpc error: code = NotFound desc = could not find container \"d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a\": container with ID starting with d1084f7d80980441e0edc71c0cdc9c291c0cc589fb474f71b405d11c755d777a not found: ID does not exist" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.263038 4876 scope.go:117] "RemoveContainer" 
containerID="0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0" Dec 15 07:21:11 crc kubenswrapper[4876]: E1215 07:21:11.263557 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0\": container with ID starting with 0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0 not found: ID does not exist" containerID="0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.263583 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0"} err="failed to get container status \"0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0\": rpc error: code = NotFound desc = could not find container \"0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0\": container with ID starting with 0e90b0d8a391e30f5fb9270e8ed49fe4fb430f1796acae16e31936a483f2a6f0 not found: ID does not exist" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.308465 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfsj7\" (UniqueName: \"kubernetes.io/projected/7dfd3544-72a2-42f2-91f3-3e2d581716ec-kube-api-access-jfsj7\") on node \"crc\" DevicePath \"\"" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.308498 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.308509 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfd3544-72a2-42f2-91f3-3e2d581716ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.513320 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:21:11 crc kubenswrapper[4876]: I1215 07:21:11.523145 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kl5g7"] Dec 15 07:21:12 crc kubenswrapper[4876]: I1215 07:21:12.719602 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" path="/var/lib/kubelet/pods/7dfd3544-72a2-42f2-91f3-3e2d581716ec/volumes" Dec 15 07:21:20 crc kubenswrapper[4876]: I1215 07:21:20.570479 4876 scope.go:117] "RemoveContainer" containerID="929292f41d8a9a781bbba6431a1271b170eeff8362239c0fd03c074a8f5532c4" Dec 15 07:21:20 crc kubenswrapper[4876]: I1215 07:21:20.624865 4876 scope.go:117] "RemoveContainer" containerID="266b115ff9869144a93e2d16d75e5aa44cf4050ac7af88dd9b06571e1851f9c3" Dec 15 07:21:20 crc kubenswrapper[4876]: I1215 07:21:20.680464 4876 scope.go:117] "RemoveContainer" containerID="9358846d442c82631bff2771fc2bcff89a0df61ba8e8aa255a9bf5adbd4fec9c" Dec 15 07:21:22 crc kubenswrapper[4876]: I1215 07:21:22.705847 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:21:22 crc kubenswrapper[4876]: E1215 07:21:22.706331 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:21:34 crc kubenswrapper[4876]: I1215 07:21:34.709828 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:21:36 crc kubenswrapper[4876]: I1215 07:21:36.385593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0"} Dec 15 07:22:20 crc kubenswrapper[4876]: I1215 07:22:20.767510 4876 scope.go:117] "RemoveContainer" containerID="1138bf311330adc926716e644beff1672cd4d4c45348c5a819155d3c4981d787" Dec 15 07:23:20 crc kubenswrapper[4876]: I1215 07:23:20.856419 4876 scope.go:117] "RemoveContainer" containerID="f0dfdb4d9e96656537226abcfccedda1e0b17902fdf09faf03792277c676a36e" Dec 15 07:23:20 crc kubenswrapper[4876]: I1215 07:23:20.881255 4876 scope.go:117] "RemoveContainer" containerID="49c1ed2ed8b735625be1e2057223049f7cfed2672d833f8281a8f3dd7362e83f" Dec 15 07:23:20 crc kubenswrapper[4876]: I1215 07:23:20.907880 4876 scope.go:117] "RemoveContainer" containerID="db9317417bd58e1f6f0e55391ae407bf3a99d22c99c1e41049173057f98cdb3b" Dec 15 07:23:57 crc kubenswrapper[4876]: I1215 07:23:57.322950 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:23:57 crc kubenswrapper[4876]: I1215 07:23:57.324015 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:24:27 crc kubenswrapper[4876]: I1215 07:24:27.322683 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:24:27 crc kubenswrapper[4876]: I1215 07:24:27.323344 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:24:57 crc kubenswrapper[4876]: I1215 07:24:57.322473 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:24:57 crc kubenswrapper[4876]: I1215 07:24:57.323306 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:24:57 crc kubenswrapper[4876]: I1215 07:24:57.323421 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:24:57 crc kubenswrapper[4876]: I1215 07:24:57.324610 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:24:57 crc kubenswrapper[4876]: I1215 07:24:57.324715 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0" gracePeriod=600 Dec 15 07:24:58 crc kubenswrapper[4876]: I1215 07:24:58.124693 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0" exitCode=0 Dec 15 07:24:58 crc kubenswrapper[4876]: I1215 07:24:58.124797 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0"} Dec 15 07:24:58 crc kubenswrapper[4876]: I1215 07:24:58.125168 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae"} Dec 15 07:24:58 crc kubenswrapper[4876]: I1215 07:24:58.125199 4876 scope.go:117] "RemoveContainer" containerID="ac9e7c10b08ffa3e1c6384246577fa9ea67686a4758f568cfd15c4a5c1938611" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.851220 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:30 crc kubenswrapper[4876]: E1215 07:25:30.852136 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="extract-utilities" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.852154 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="extract-utilities" Dec 15 07:25:30 crc kubenswrapper[4876]: E1215 07:25:30.852180 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="registry-server" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.852188 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="registry-server" Dec 15 07:25:30 crc kubenswrapper[4876]: E1215 07:25:30.852235 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="extract-content" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.852242 4876 
state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="extract-content" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.852606 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dfd3544-72a2-42f2-91f3-3e2d581716ec" containerName="registry-server" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.853919 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:30 crc kubenswrapper[4876]: I1215 07:25:30.871316 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.044671 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.044738 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2h98\" (UniqueName: \"kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.044933 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.145967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.146056 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.146119 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2h98\" (UniqueName: \"kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.146663 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.146705 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.168840 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2h98\" (UniqueName: \"kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98\") pod \"redhat-operators-pxsk9\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.175461 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:31 crc kubenswrapper[4876]: I1215 07:25:31.747999 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:31 crc kubenswrapper[4876]: W1215 07:25:31.752439 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podede9bb78_3754_4b3e_b8d8_ed1e5563f59f.slice/crio-c5e8311324d014ecf71210fbc1091982289036e5991e241146132a49d20881f4 WatchSource:0}: Error finding container c5e8311324d014ecf71210fbc1091982289036e5991e241146132a49d20881f4: Status 404 returned error can't find the container with id c5e8311324d014ecf71210fbc1091982289036e5991e241146132a49d20881f4 Dec 15 07:25:32 crc kubenswrapper[4876]: I1215 07:25:32.579263 4876 generic.go:334] "Generic (PLEG): container finished" podID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerID="d3e08c3e85f56bcbeae197aff75ba3f157670cf40d0d925f7dd6c2b4ab2a96ac" exitCode=0 Dec 15 07:25:32 crc kubenswrapper[4876]: I1215 07:25:32.579401 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerDied","Data":"d3e08c3e85f56bcbeae197aff75ba3f157670cf40d0d925f7dd6c2b4ab2a96ac"} Dec 15 07:25:32 crc kubenswrapper[4876]: I1215 07:25:32.579575 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerStarted","Data":"c5e8311324d014ecf71210fbc1091982289036e5991e241146132a49d20881f4"} Dec 15 07:25:33 crc kubenswrapper[4876]: I1215 07:25:33.588624 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerStarted","Data":"96dbbbcedf1bf76080f86e2f8135e2ef3ec465b7755e27c97d835cb9de705e84"} Dec 15 07:25:34 crc kubenswrapper[4876]: I1215 07:25:34.599936 4876 generic.go:334] "Generic (PLEG): container finished" podID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerID="96dbbbcedf1bf76080f86e2f8135e2ef3ec465b7755e27c97d835cb9de705e84" exitCode=0 Dec 15 07:25:34 crc kubenswrapper[4876]: I1215 07:25:34.599991 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerDied","Data":"96dbbbcedf1bf76080f86e2f8135e2ef3ec465b7755e27c97d835cb9de705e84"} Dec 15 07:25:35 crc kubenswrapper[4876]: I1215 07:25:35.619847 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerStarted","Data":"2ede2abe9baeba25d7d0800eff2ed3d5e91e8d72e1c49f575ba518b1573195b4"} Dec 15 07:25:35 crc kubenswrapper[4876]: I1215 07:25:35.660329 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pxsk9" podStartSLOduration=3.232052988 podStartE2EDuration="5.660303893s" podCreationTimestamp="2025-12-15 07:25:30 +0000 UTC" firstStartedPulling="2025-12-15 07:25:32.580420031 +0000 UTC m=+2058.151562942" lastFinishedPulling="2025-12-15 07:25:35.008670896 +0000 UTC m=+2060.579813847" observedRunningTime="2025-12-15 07:25:35.651769513 +0000 UTC m=+2061.222912454" watchObservedRunningTime="2025-12-15 07:25:35.660303893 +0000 UTC m=+2061.231446844" Dec 15 07:25:41 crc kubenswrapper[4876]: I1215 07:25:41.175929 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:41 crc kubenswrapper[4876]: I1215 07:25:41.176755 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:41 crc kubenswrapper[4876]: I1215 07:25:41.222683 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:41 crc kubenswrapper[4876]: I1215 07:25:41.713411 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:41 crc kubenswrapper[4876]: I1215 07:25:41.757363 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.678861 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pxsk9" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="registry-server" containerID="cri-o://2ede2abe9baeba25d7d0800eff2ed3d5e91e8d72e1c49f575ba518b1573195b4" gracePeriod=2 Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.868751 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.870444 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.877017 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.929315 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.929451 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:43 crc kubenswrapper[4876]: I1215 07:25:43.929545 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj7ks\" (UniqueName: \"kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.030484 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj7ks\" (UniqueName: \"kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.030555 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.031128 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.031471 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.031487 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.050133 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-fj7ks\" (UniqueName: \"kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks\") pod \"redhat-marketplace-dj7ts\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.245531 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.668848 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:44 crc kubenswrapper[4876]: I1215 07:25:44.690913 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerStarted","Data":"7272c902e8135da81b8506e2167bceef3339fbcb841559d12fdeb0292a728b71"} Dec 15 07:25:45 crc kubenswrapper[4876]: I1215 07:25:45.700918 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerStarted","Data":"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098"} Dec 15 07:25:45 crc kubenswrapper[4876]: I1215 07:25:45.707342 4876 generic.go:334] "Generic (PLEG): container finished" podID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerID="2ede2abe9baeba25d7d0800eff2ed3d5e91e8d72e1c49f575ba518b1573195b4" exitCode=0 Dec 15 07:25:45 crc kubenswrapper[4876]: I1215 07:25:45.707400 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerDied","Data":"2ede2abe9baeba25d7d0800eff2ed3d5e91e8d72e1c49f575ba518b1573195b4"} Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.722150 4876 generic.go:334] "Generic (PLEG): container finished" podID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerID="8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098" exitCode=0 Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.724489 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerDied","Data":"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098"} Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.859933 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.975510 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2h98\" (UniqueName: \"kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98\") pod \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.975604 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content\") pod \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.975717 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities\") pod \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\" (UID: \"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f\") " Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.976715 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities" (OuterVolumeSpecName: "utilities") pod "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" (UID: "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:25:46 crc kubenswrapper[4876]: I1215 07:25:46.982586 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98" (OuterVolumeSpecName: "kube-api-access-b2h98") pod "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" (UID: "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f"). InnerVolumeSpecName "kube-api-access-b2h98". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.077381 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.077434 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2h98\" (UniqueName: \"kubernetes.io/projected/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-kube-api-access-b2h98\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.114305 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" (UID: "ede9bb78-3754-4b3e-b8d8-ed1e5563f59f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.179505 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.731034 4876 generic.go:334] "Generic (PLEG): container finished" podID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerID="d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7" exitCode=0 Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.731126 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerDied","Data":"d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7"} Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.733617 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pxsk9" event={"ID":"ede9bb78-3754-4b3e-b8d8-ed1e5563f59f","Type":"ContainerDied","Data":"c5e8311324d014ecf71210fbc1091982289036e5991e241146132a49d20881f4"} Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.733664 4876 scope.go:117] "RemoveContainer" containerID="2ede2abe9baeba25d7d0800eff2ed3d5e91e8d72e1c49f575ba518b1573195b4" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.733666 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pxsk9" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.750079 4876 scope.go:117] "RemoveContainer" containerID="96dbbbcedf1bf76080f86e2f8135e2ef3ec465b7755e27c97d835cb9de705e84" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.769966 4876 scope.go:117] "RemoveContainer" containerID="d3e08c3e85f56bcbeae197aff75ba3f157670cf40d0d925f7dd6c2b4ab2a96ac" Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.786725 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:47 crc kubenswrapper[4876]: I1215 07:25:47.799004 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pxsk9"] Dec 15 07:25:48 crc kubenswrapper[4876]: I1215 07:25:48.714850 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" path="/var/lib/kubelet/pods/ede9bb78-3754-4b3e-b8d8-ed1e5563f59f/volumes" Dec 15 07:25:48 crc kubenswrapper[4876]: I1215 07:25:48.741672 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerStarted","Data":"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08"} Dec 15 07:25:48 crc kubenswrapper[4876]: I1215 07:25:48.772810 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dj7ts" podStartSLOduration=4.2780023289999995 podStartE2EDuration="5.772788195s" podCreationTimestamp="2025-12-15 07:25:43 +0000 UTC" firstStartedPulling="2025-12-15 07:25:46.725499079 +0000 UTC m=+2072.296642000" lastFinishedPulling="2025-12-15 07:25:48.220284955 +0000 UTC m=+2073.791427866" observedRunningTime="2025-12-15 07:25:48.767903363 +0000 UTC m=+2074.339046274" watchObservedRunningTime="2025-12-15 07:25:48.772788195 +0000 UTC m=+2074.343931106" Dec 15 07:25:54 crc kubenswrapper[4876]: I1215 
07:25:54.246582 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:54 crc kubenswrapper[4876]: I1215 07:25:54.247206 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:54 crc kubenswrapper[4876]: I1215 07:25:54.306223 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:54 crc kubenswrapper[4876]: I1215 07:25:54.857263 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:54 crc kubenswrapper[4876]: I1215 07:25:54.913021 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:56 crc kubenswrapper[4876]: I1215 07:25:56.807887 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dj7ts" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="registry-server" containerID="cri-o://22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08" gracePeriod=2 Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.734512 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.818366 4876 generic.go:334] "Generic (PLEG): container finished" podID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerID="22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08" exitCode=0 Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.818399 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerDied","Data":"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08"} Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.818697 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dj7ts" event={"ID":"29c63da4-8fa8-4815-886d-485c9b3cfcab","Type":"ContainerDied","Data":"7272c902e8135da81b8506e2167bceef3339fbcb841559d12fdeb0292a728b71"} Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.818719 4876 scope.go:117] "RemoveContainer" containerID="22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.818431 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dj7ts" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.838430 4876 scope.go:117] "RemoveContainer" containerID="d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.851649 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities\") pod \"29c63da4-8fa8-4815-886d-485c9b3cfcab\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.851719 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj7ks\" (UniqueName: \"kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks\") pod \"29c63da4-8fa8-4815-886d-485c9b3cfcab\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.851783 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content\") pod \"29c63da4-8fa8-4815-886d-485c9b3cfcab\" (UID: \"29c63da4-8fa8-4815-886d-485c9b3cfcab\") " Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.852911 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities" (OuterVolumeSpecName: "utilities") pod "29c63da4-8fa8-4815-886d-485c9b3cfcab" (UID: "29c63da4-8fa8-4815-886d-485c9b3cfcab"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.853030 4876 scope.go:117] "RemoveContainer" containerID="8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.857382 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks" (OuterVolumeSpecName: "kube-api-access-fj7ks") pod "29c63da4-8fa8-4815-886d-485c9b3cfcab" (UID: "29c63da4-8fa8-4815-886d-485c9b3cfcab"). InnerVolumeSpecName "kube-api-access-fj7ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.873777 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "29c63da4-8fa8-4815-886d-485c9b3cfcab" (UID: "29c63da4-8fa8-4815-886d-485c9b3cfcab"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.905887 4876 scope.go:117] "RemoveContainer" containerID="22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08" Dec 15 07:25:57 crc kubenswrapper[4876]: E1215 07:25:57.906850 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08\": container with ID starting with 22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08 not found: ID does not exist" containerID="22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.906911 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08"} err="failed to get container status \"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08\": rpc error: code = NotFound desc = could not find container \"22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08\": container with ID starting with 22a8cf59cca4dd642fcabe70da49e5ad3416f2972a501b5374d61b0ff86e8a08 not found: ID does not exist" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.906952 4876 scope.go:117] "RemoveContainer" containerID="d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7" Dec 15 07:25:57 crc kubenswrapper[4876]: E1215 07:25:57.907309 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7\": container with ID starting with d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7 not found: ID does not exist" containerID="d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.907345 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7"} err="failed to get container status \"d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7\": rpc error: code = NotFound desc = could not find container \"d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7\": container with ID starting with d62bbe4bdc935bb60c252dce4925bdf5d0a11e6d0a3fa0b59e065b2ffb5155d7 not found: ID does not exist" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.907374 4876 scope.go:117] "RemoveContainer" containerID="8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098" Dec 15 07:25:57 crc kubenswrapper[4876]: E1215 07:25:57.907657 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098\": container with ID starting with 8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098 not found: ID does not exist" containerID="8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.907693 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098"} err="failed to get container status \"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098\": rpc error: code = NotFound desc = could not 
find container \"8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098\": container with ID starting with 8ce1b0a429cbfe6abec15dc37e39644b5d3b4f5a0c36ae6b0baafa8f93a12098 not found: ID does not exist" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.953583 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.953621 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj7ks\" (UniqueName: \"kubernetes.io/projected/29c63da4-8fa8-4815-886d-485c9b3cfcab-kube-api-access-fj7ks\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:57 crc kubenswrapper[4876]: I1215 07:25:57.953632 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/29c63da4-8fa8-4815-886d-485c9b3cfcab-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:25:58 crc kubenswrapper[4876]: I1215 07:25:58.173356 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:58 crc kubenswrapper[4876]: I1215 07:25:58.182151 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dj7ts"] Dec 15 07:25:58 crc kubenswrapper[4876]: I1215 07:25:58.719709 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" path="/var/lib/kubelet/pods/29c63da4-8fa8-4815-886d-485c9b3cfcab/volumes" Dec 15 07:26:57 crc kubenswrapper[4876]: I1215 07:26:57.322485 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:26:57 crc kubenswrapper[4876]: I1215 07:26:57.323148 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.125163 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.125964 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="extract-utilities" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.125979 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="extract-utilities" Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.125993 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="extract-utilities" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126001 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="extract-utilities" Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.126019 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="registry-server" Dec 15 
07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126052 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="registry-server" Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.126066 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="extract-content" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126073 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="extract-content" Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.126092 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="registry-server" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126119 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="registry-server" Dec 15 07:27:25 crc kubenswrapper[4876]: E1215 07:27:25.126134 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="extract-content" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126141 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="extract-content" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126303 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c63da4-8fa8-4815-886d-485c9b3cfcab" containerName="registry-server" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.126321 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ede9bb78-3754-4b3e-b8d8-ed1e5563f59f" containerName="registry-server" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.127458 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.135943 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.241845 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.241897 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f87gh\" (UniqueName: \"kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.241939 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.343801 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.343867 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f87gh\" (UniqueName: \"kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.343928 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.344380 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.344532 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.364953 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f87gh\" (UniqueName: \"kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh\") pod \"certified-operators-m8rc2\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.459207 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:25 crc kubenswrapper[4876]: I1215 07:27:25.926348 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:26 crc kubenswrapper[4876]: I1215 07:27:26.901639 4876 generic.go:334] "Generic (PLEG): container finished" podID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerID="2e94fe57f3a76102420d7890ebb035c1dbb49dbfbd8e28d5cc6c93d19a64ed43" exitCode=0 Dec 15 07:27:26 crc kubenswrapper[4876]: I1215 07:27:26.901700 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerDied","Data":"2e94fe57f3a76102420d7890ebb035c1dbb49dbfbd8e28d5cc6c93d19a64ed43"} Dec 15 07:27:26 crc kubenswrapper[4876]: I1215 07:27:26.901757 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerStarted","Data":"3ad7a567ca8f399bde145fa67bf340bebc96c6346d05a44a32c805570cd632f4"} Dec 15 07:27:26 crc kubenswrapper[4876]: I1215 07:27:26.904733 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:27:27 crc kubenswrapper[4876]: I1215 07:27:27.323014 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:27:27 crc kubenswrapper[4876]: I1215 07:27:27.323377 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:27:27 crc kubenswrapper[4876]: I1215 07:27:27.912485 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerStarted","Data":"35c4008e2adb4b0b0134a88df209977e44e749290a5b0f715130b62565645f68"} Dec 15 07:27:28 crc kubenswrapper[4876]: I1215 07:27:28.928436 4876 generic.go:334] "Generic (PLEG): container finished" podID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerID="35c4008e2adb4b0b0134a88df209977e44e749290a5b0f715130b62565645f68" exitCode=0 Dec 15 07:27:28 crc kubenswrapper[4876]: I1215 07:27:28.928944 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerDied","Data":"35c4008e2adb4b0b0134a88df209977e44e749290a5b0f715130b62565645f68"} Dec 15 07:27:30 crc kubenswrapper[4876]: I1215 07:27:30.945062 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerStarted","Data":"5cb56aa4abd1ed490bc0cb343eaa922c3c29152758c44ebccb0661445ba2963a"} Dec 15 07:27:30 crc kubenswrapper[4876]: I1215 07:27:30.964910 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m8rc2" podStartSLOduration=2.449521265 podStartE2EDuration="5.964874764s" podCreationTimestamp="2025-12-15 07:27:25 +0000 UTC" firstStartedPulling="2025-12-15 07:27:26.904452143 +0000 UTC m=+2172.475595054" lastFinishedPulling="2025-12-15 07:27:30.419805632 +0000 UTC m=+2175.990948553" observedRunningTime="2025-12-15 07:27:30.962246683 +0000 UTC m=+2176.533389604" watchObservedRunningTime="2025-12-15 07:27:30.964874764 +0000 UTC m=+2176.536017675" Dec 15 07:27:35 crc kubenswrapper[4876]: I1215 07:27:35.459864 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:35 crc kubenswrapper[4876]: I1215 07:27:35.460243 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:35 crc kubenswrapper[4876]: I1215 07:27:35.535580 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:36 crc kubenswrapper[4876]: I1215 07:27:36.033876 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:36 crc kubenswrapper[4876]: I1215 07:27:36.076535 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:37 crc kubenswrapper[4876]: I1215 07:27:37.995356 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m8rc2" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="registry-server" containerID="cri-o://5cb56aa4abd1ed490bc0cb343eaa922c3c29152758c44ebccb0661445ba2963a" gracePeriod=2 Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.003749 4876 generic.go:334] "Generic (PLEG): container finished" podID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerID="5cb56aa4abd1ed490bc0cb343eaa922c3c29152758c44ebccb0661445ba2963a" exitCode=0 Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.003929 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerDied","Data":"5cb56aa4abd1ed490bc0cb343eaa922c3c29152758c44ebccb0661445ba2963a"} Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.535857 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.569357 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content\") pod \"a78415b7-1b32-4b25-a9f3-c0af24d43474\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.569424 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities\") pod \"a78415b7-1b32-4b25-a9f3-c0af24d43474\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.569461 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f87gh\" (UniqueName: \"kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh\") pod \"a78415b7-1b32-4b25-a9f3-c0af24d43474\" (UID: \"a78415b7-1b32-4b25-a9f3-c0af24d43474\") " Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.570450 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities" (OuterVolumeSpecName: "utilities") pod "a78415b7-1b32-4b25-a9f3-c0af24d43474" (UID: "a78415b7-1b32-4b25-a9f3-c0af24d43474"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.574479 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh" (OuterVolumeSpecName: "kube-api-access-f87gh") pod "a78415b7-1b32-4b25-a9f3-c0af24d43474" (UID: "a78415b7-1b32-4b25-a9f3-c0af24d43474"). InnerVolumeSpecName "kube-api-access-f87gh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.619003 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a78415b7-1b32-4b25-a9f3-c0af24d43474" (UID: "a78415b7-1b32-4b25-a9f3-c0af24d43474"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.670398 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.670427 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a78415b7-1b32-4b25-a9f3-c0af24d43474-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:27:39 crc kubenswrapper[4876]: I1215 07:27:39.670437 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f87gh\" (UniqueName: \"kubernetes.io/projected/a78415b7-1b32-4b25-a9f3-c0af24d43474-kube-api-access-f87gh\") on node \"crc\" DevicePath \"\"" Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.021012 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m8rc2" event={"ID":"a78415b7-1b32-4b25-a9f3-c0af24d43474","Type":"ContainerDied","Data":"3ad7a567ca8f399bde145fa67bf340bebc96c6346d05a44a32c805570cd632f4"} Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.021062 4876 scope.go:117] "RemoveContainer" containerID="5cb56aa4abd1ed490bc0cb343eaa922c3c29152758c44ebccb0661445ba2963a" Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.021167 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m8rc2" Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.059779 4876 scope.go:117] "RemoveContainer" containerID="35c4008e2adb4b0b0134a88df209977e44e749290a5b0f715130b62565645f68" Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.066569 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.074925 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m8rc2"] Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.088178 4876 scope.go:117] "RemoveContainer" containerID="2e94fe57f3a76102420d7890ebb035c1dbb49dbfbd8e28d5cc6c93d19a64ed43" Dec 15 07:27:40 crc kubenswrapper[4876]: I1215 07:27:40.713824 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" path="/var/lib/kubelet/pods/a78415b7-1b32-4b25-a9f3-c0af24d43474/volumes" Dec 15 07:27:57 crc kubenswrapper[4876]: I1215 07:27:57.323405 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:27:57 crc kubenswrapper[4876]: I1215 07:27:57.323801 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:27:57 crc kubenswrapper[4876]: I1215 07:27:57.323871 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:27:57 crc kubenswrapper[4876]: I1215 07:27:57.324752 4876 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:27:57 crc kubenswrapper[4876]: I1215 07:27:57.324852 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" gracePeriod=600 Dec 15 07:27:57 crc kubenswrapper[4876]: E1215 07:27:57.457578 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:27:58 crc kubenswrapper[4876]: I1215 07:27:58.180427 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" exitCode=0 Dec 15 07:27:58 crc kubenswrapper[4876]: I1215 07:27:58.180527 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae"} Dec 15 07:27:58 crc kubenswrapper[4876]: I1215 07:27:58.181210 4876 scope.go:117] "RemoveContainer" containerID="306570a9232acef7a0cfba5e03342446c03cab0d4458c14f4cdfe2041d8911e0" Dec 15 07:27:58 crc kubenswrapper[4876]: I1215 07:27:58.181853 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:27:58 crc kubenswrapper[4876]: E1215 07:27:58.182367 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:28:12 crc kubenswrapper[4876]: I1215 07:28:12.707486 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:28:12 crc kubenswrapper[4876]: E1215 07:28:12.708460 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:28:24 crc kubenswrapper[4876]: I1215 07:28:24.709056 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 
07:28:24 crc kubenswrapper[4876]: E1215 07:28:24.710359 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:28:36 crc kubenswrapper[4876]: I1215 07:28:36.706252 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:28:36 crc kubenswrapper[4876]: E1215 07:28:36.707150 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:28:48 crc kubenswrapper[4876]: I1215 07:28:48.705428 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:28:48 crc kubenswrapper[4876]: E1215 07:28:48.706199 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:29:00 crc kubenswrapper[4876]: I1215 07:29:00.705557 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:29:00 crc kubenswrapper[4876]: E1215 07:29:00.706198 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:29:14 crc kubenswrapper[4876]: I1215 07:29:14.705818 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:29:14 crc kubenswrapper[4876]: E1215 07:29:14.708988 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:29:27 crc kubenswrapper[4876]: I1215 07:29:27.706384 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:29:27 crc kubenswrapper[4876]: E1215 07:29:27.707757 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:29:39 crc kubenswrapper[4876]: I1215 07:29:39.705831 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:29:39 crc kubenswrapper[4876]: E1215 07:29:39.706782 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:29:50 crc kubenswrapper[4876]: I1215 07:29:50.706189 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:29:50 crc kubenswrapper[4876]: E1215 07:29:50.707264 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.163854 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl"] Dec 15 07:30:00 crc kubenswrapper[4876]: E1215 07:30:00.165034 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="registry-server" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.165058 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="registry-server" Dec 15 07:30:00 crc kubenswrapper[4876]: E1215 07:30:00.165086 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="extract-utilities" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.165099 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="extract-utilities" Dec 15 07:30:00 crc kubenswrapper[4876]: E1215 07:30:00.165170 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="extract-content" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.165184 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="extract-content" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.165409 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a78415b7-1b32-4b25-a9f3-c0af24d43474" containerName="registry-server" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.166099 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.168449 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.169874 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.174124 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl"] Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.202321 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq7xc\" (UniqueName: \"kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.202415 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.202480 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.303695 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.303786 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq7xc\" (UniqueName: \"kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.303822 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.304643 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume\") pod 
\"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.309541 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.318264 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq7xc\" (UniqueName: \"kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc\") pod \"collect-profiles-29429730-74xbl\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.493889 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:00 crc kubenswrapper[4876]: I1215 07:30:00.903824 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl"] Dec 15 07:30:01 crc kubenswrapper[4876]: I1215 07:30:01.186287 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" event={"ID":"82f1250f-9f77-46f6-9332-9639e443ad70","Type":"ContainerStarted","Data":"8c25af4b74315f842b92ecc56be19e77ca2d7032d5e3123c31a1caeb74cfc033"} Dec 15 07:30:01 crc kubenswrapper[4876]: I1215 07:30:01.188492 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" event={"ID":"82f1250f-9f77-46f6-9332-9639e443ad70","Type":"ContainerStarted","Data":"d3ad85711ba03ce5df15ba89a8f9c3050909bcc473dd55c86abb8208ceca5b5c"} Dec 15 07:30:01 crc kubenswrapper[4876]: I1215 07:30:01.207736 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" podStartSLOduration=1.207717521 podStartE2EDuration="1.207717521s" podCreationTimestamp="2025-12-15 07:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 07:30:01.20292413 +0000 UTC m=+2326.774067041" watchObservedRunningTime="2025-12-15 07:30:01.207717521 +0000 UTC m=+2326.778860432" Dec 15 07:30:02 crc kubenswrapper[4876]: I1215 07:30:02.196238 4876 generic.go:334] "Generic (PLEG): container finished" podID="82f1250f-9f77-46f6-9332-9639e443ad70" containerID="8c25af4b74315f842b92ecc56be19e77ca2d7032d5e3123c31a1caeb74cfc033" exitCode=0 Dec 15 07:30:02 crc kubenswrapper[4876]: I1215 07:30:02.196361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" event={"ID":"82f1250f-9f77-46f6-9332-9639e443ad70","Type":"ContainerDied","Data":"8c25af4b74315f842b92ecc56be19e77ca2d7032d5e3123c31a1caeb74cfc033"} Dec 15 07:30:02 crc kubenswrapper[4876]: I1215 07:30:02.705765 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:30:02 crc kubenswrapper[4876]: E1215 07:30:02.706282 4876 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.448349 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.557478 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq7xc\" (UniqueName: \"kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc\") pod \"82f1250f-9f77-46f6-9332-9639e443ad70\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.557559 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume\") pod \"82f1250f-9f77-46f6-9332-9639e443ad70\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.557699 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume\") pod \"82f1250f-9f77-46f6-9332-9639e443ad70\" (UID: \"82f1250f-9f77-46f6-9332-9639e443ad70\") " Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.558348 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume" (OuterVolumeSpecName: "config-volume") pod "82f1250f-9f77-46f6-9332-9639e443ad70" (UID: "82f1250f-9f77-46f6-9332-9639e443ad70"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.563387 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "82f1250f-9f77-46f6-9332-9639e443ad70" (UID: "82f1250f-9f77-46f6-9332-9639e443ad70"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.563580 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc" (OuterVolumeSpecName: "kube-api-access-pq7xc") pod "82f1250f-9f77-46f6-9332-9639e443ad70" (UID: "82f1250f-9f77-46f6-9332-9639e443ad70"). InnerVolumeSpecName "kube-api-access-pq7xc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.658888 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq7xc\" (UniqueName: \"kubernetes.io/projected/82f1250f-9f77-46f6-9332-9639e443ad70-kube-api-access-pq7xc\") on node \"crc\" DevicePath \"\"" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.658923 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/82f1250f-9f77-46f6-9332-9639e443ad70-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:30:03 crc kubenswrapper[4876]: I1215 07:30:03.658933 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/82f1250f-9f77-46f6-9332-9639e443ad70-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.215921 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" event={"ID":"82f1250f-9f77-46f6-9332-9639e443ad70","Type":"ContainerDied","Data":"d3ad85711ba03ce5df15ba89a8f9c3050909bcc473dd55c86abb8208ceca5b5c"} Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.216031 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3ad85711ba03ce5df15ba89a8f9c3050909bcc473dd55c86abb8208ceca5b5c" Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.215990 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl" Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.301824 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c"] Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.308834 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429685-bzx4c"] Dec 15 07:30:04 crc kubenswrapper[4876]: I1215 07:30:04.718509 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df271eaa-2f40-4651-9942-5c6e535654d6" path="/var/lib/kubelet/pods/df271eaa-2f40-4651-9942-5c6e535654d6/volumes" Dec 15 07:30:17 crc kubenswrapper[4876]: I1215 07:30:17.705758 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:30:17 crc kubenswrapper[4876]: E1215 07:30:17.706559 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:30:21 crc kubenswrapper[4876]: I1215 07:30:21.103193 4876 scope.go:117] "RemoveContainer" containerID="5be7d03bb9ea3495b836f4564d633dc2d3ffe84c283b160d677fb54cfc4f5096" Dec 15 07:30:31 crc kubenswrapper[4876]: I1215 07:30:31.706092 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:30:31 crc kubenswrapper[4876]: E1215 07:30:31.707000 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:30:44 crc kubenswrapper[4876]: I1215 07:30:44.713677 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:30:44 crc kubenswrapper[4876]: E1215 07:30:44.715227 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:30:58 crc kubenswrapper[4876]: I1215 07:30:58.705201 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:30:58 crc kubenswrapper[4876]: E1215 07:30:58.706066 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:31:12 crc kubenswrapper[4876]: I1215 07:31:12.707171 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:31:12 crc kubenswrapper[4876]: E1215 07:31:12.707959 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:31:23 crc kubenswrapper[4876]: I1215 07:31:23.705917 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:31:23 crc kubenswrapper[4876]: E1215 07:31:23.706842 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:31:38 crc kubenswrapper[4876]: I1215 07:31:38.705797 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:31:38 crc kubenswrapper[4876]: E1215 07:31:38.707238 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:31:49 crc kubenswrapper[4876]: I1215 07:31:49.705480 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:31:49 crc kubenswrapper[4876]: E1215 07:31:49.706472 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:32:04 crc kubenswrapper[4876]: I1215 07:32:04.709298 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:32:04 crc kubenswrapper[4876]: E1215 07:32:04.710015 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:32:15 crc kubenswrapper[4876]: I1215 07:32:15.705817 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:32:15 crc kubenswrapper[4876]: E1215 07:32:15.706699 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:32:28 crc kubenswrapper[4876]: I1215 07:32:28.706396 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:32:28 crc kubenswrapper[4876]: E1215 07:32:28.707339 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:32:42 crc kubenswrapper[4876]: I1215 07:32:42.705526 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:32:42 crc kubenswrapper[4876]: E1215 07:32:42.706483 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:32:53 crc kubenswrapper[4876]: I1215 07:32:53.705895 4876 
scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:32:53 crc kubenswrapper[4876]: E1215 07:32:53.707035 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:33:01 crc kubenswrapper[4876]: I1215 07:33:01.868602 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:01 crc kubenswrapper[4876]: E1215 07:33:01.869610 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82f1250f-9f77-46f6-9332-9639e443ad70" containerName="collect-profiles" Dec 15 07:33:01 crc kubenswrapper[4876]: I1215 07:33:01.869625 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="82f1250f-9f77-46f6-9332-9639e443ad70" containerName="collect-profiles" Dec 15 07:33:01 crc kubenswrapper[4876]: I1215 07:33:01.869782 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="82f1250f-9f77-46f6-9332-9639e443ad70" containerName="collect-profiles" Dec 15 07:33:01 crc kubenswrapper[4876]: I1215 07:33:01.870781 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:01 crc kubenswrapper[4876]: I1215 07:33:01.882768 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.042827 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.042874 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55fmq\" (UniqueName: \"kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.043502 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.144964 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.145020 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55fmq\" 
(UniqueName: \"kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.145070 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.145624 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.146153 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.171803 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55fmq\" (UniqueName: \"kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq\") pod \"community-operators-rh9l4\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.195010 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:02 crc kubenswrapper[4876]: I1215 07:33:02.694509 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:03 crc kubenswrapper[4876]: I1215 07:33:03.624776 4876 generic.go:334] "Generic (PLEG): container finished" podID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerID="d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd" exitCode=0 Dec 15 07:33:03 crc kubenswrapper[4876]: I1215 07:33:03.624993 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerDied","Data":"d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd"} Dec 15 07:33:03 crc kubenswrapper[4876]: I1215 07:33:03.625092 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerStarted","Data":"244cc8e492e5a85fc69f5c17820a80be368aa0b2bd3353ec2e7d9eb0db7a7ce2"} Dec 15 07:33:03 crc kubenswrapper[4876]: I1215 07:33:03.627406 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:33:04 crc kubenswrapper[4876]: I1215 07:33:04.636418 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerStarted","Data":"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9"} Dec 15 07:33:05 crc kubenswrapper[4876]: I1215 07:33:05.650317 4876 generic.go:334] "Generic (PLEG): container finished" podID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerID="7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9" exitCode=0 Dec 15 07:33:05 crc kubenswrapper[4876]: I1215 07:33:05.650383 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerDied","Data":"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9"} Dec 15 07:33:06 crc kubenswrapper[4876]: I1215 07:33:06.659839 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerStarted","Data":"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549"} Dec 15 07:33:06 crc kubenswrapper[4876]: I1215 07:33:06.688432 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rh9l4" podStartSLOduration=3.235679609 podStartE2EDuration="5.688409673s" podCreationTimestamp="2025-12-15 07:33:01 +0000 UTC" firstStartedPulling="2025-12-15 07:33:03.62715889 +0000 UTC m=+2509.198301801" lastFinishedPulling="2025-12-15 07:33:06.079888914 +0000 UTC m=+2511.651031865" observedRunningTime="2025-12-15 07:33:06.682631516 +0000 UTC m=+2512.253774447" watchObservedRunningTime="2025-12-15 07:33:06.688409673 +0000 UTC m=+2512.259552614" Dec 15 07:33:08 crc kubenswrapper[4876]: I1215 07:33:08.705387 4876 scope.go:117] "RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:33:09 crc kubenswrapper[4876]: I1215 07:33:09.687586 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e"} Dec 15 07:33:12 crc kubenswrapper[4876]: I1215 07:33:12.196214 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:12 crc kubenswrapper[4876]: I1215 07:33:12.197068 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:12 crc kubenswrapper[4876]: I1215 07:33:12.253716 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:12 crc kubenswrapper[4876]: I1215 07:33:12.814541 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:12 crc kubenswrapper[4876]: I1215 07:33:12.860418 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:14 crc kubenswrapper[4876]: I1215 07:33:14.741821 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rh9l4" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="registry-server" containerID="cri-o://f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549" gracePeriod=2 Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.203612 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.343204 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities\") pod \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.343261 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55fmq\" (UniqueName: \"kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq\") pod \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.343320 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content\") pod \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\" (UID: \"137d7e14-9d53-450f-90ed-3c1ed4e6aed2\") " Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.344055 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities" (OuterVolumeSpecName: "utilities") pod "137d7e14-9d53-450f-90ed-3c1ed4e6aed2" (UID: "137d7e14-9d53-450f-90ed-3c1ed4e6aed2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.349229 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq" (OuterVolumeSpecName: "kube-api-access-55fmq") pod "137d7e14-9d53-450f-90ed-3c1ed4e6aed2" (UID: "137d7e14-9d53-450f-90ed-3c1ed4e6aed2"). 
InnerVolumeSpecName "kube-api-access-55fmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.392042 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "137d7e14-9d53-450f-90ed-3c1ed4e6aed2" (UID: "137d7e14-9d53-450f-90ed-3c1ed4e6aed2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.445100 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.445322 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55fmq\" (UniqueName: \"kubernetes.io/projected/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-kube-api-access-55fmq\") on node \"crc\" DevicePath \"\"" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.445389 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/137d7e14-9d53-450f-90ed-3c1ed4e6aed2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.753345 4876 generic.go:334] "Generic (PLEG): container finished" podID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerID="f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549" exitCode=0 Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.753401 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerDied","Data":"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549"} Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.753471 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rh9l4" event={"ID":"137d7e14-9d53-450f-90ed-3c1ed4e6aed2","Type":"ContainerDied","Data":"244cc8e492e5a85fc69f5c17820a80be368aa0b2bd3353ec2e7d9eb0db7a7ce2"} Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.753496 4876 scope.go:117] "RemoveContainer" containerID="f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.754885 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rh9l4" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.789730 4876 scope.go:117] "RemoveContainer" containerID="7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.802198 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.806545 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rh9l4"] Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.823488 4876 scope.go:117] "RemoveContainer" containerID="d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.840903 4876 scope.go:117] "RemoveContainer" containerID="f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549" Dec 15 07:33:15 crc kubenswrapper[4876]: E1215 07:33:15.841499 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549\": container with ID starting with f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549 not found: ID does not exist" containerID="f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.841576 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549"} err="failed to get container status \"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549\": rpc error: code = NotFound desc = could not find container \"f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549\": container with ID starting with f59ea4180274758949d6fa05461af87226163dd602296f8b31c78e449b6f1549 not found: ID does not exist" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.841623 4876 scope.go:117] "RemoveContainer" containerID="7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9" Dec 15 07:33:15 crc kubenswrapper[4876]: E1215 07:33:15.842075 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9\": container with ID starting with 7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9 not found: ID does not exist" containerID="7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.842180 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9"} err="failed to get container status \"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9\": rpc error: code = NotFound desc = could not find container \"7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9\": container with ID starting with 7fe0754b64e1bef299efdd585d65e6b288d2f8ce6301e1b5bf0367ca129ebcd9 not found: ID does not exist" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.842225 4876 scope.go:117] "RemoveContainer" containerID="d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd" Dec 15 07:33:15 crc kubenswrapper[4876]: E1215 07:33:15.842805 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd\": container with ID starting with d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd not found: ID does not exist" containerID="d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd" Dec 15 07:33:15 crc kubenswrapper[4876]: I1215 07:33:15.842840 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd"} err="failed to get container status \"d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd\": rpc error: code = NotFound desc = could not find container \"d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd\": container with ID starting with d5268daf9b7c6a24ca2988c33ea59b2c9163f33604ae6af0fef908c3c1775bcd not found: ID does not exist" Dec 15 07:33:16 crc kubenswrapper[4876]: I1215 07:33:16.721206 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" path="/var/lib/kubelet/pods/137d7e14-9d53-450f-90ed-3c1ed4e6aed2/volumes" Dec 15 07:35:27 crc kubenswrapper[4876]: I1215 07:35:27.322678 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:35:27 crc kubenswrapper[4876]: I1215 07:35:27.323301 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:35:57 crc kubenswrapper[4876]: I1215 07:35:57.323157 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:35:57 crc kubenswrapper[4876]: I1215 07:35:57.323640 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.453574 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:21 crc kubenswrapper[4876]: E1215 07:36:21.454669 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="extract-utilities" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.454688 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="extract-utilities" Dec 15 07:36:21 crc kubenswrapper[4876]: E1215 07:36:21.454731 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="extract-content" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.454742 4876 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="extract-content" Dec 15 07:36:21 crc kubenswrapper[4876]: E1215 07:36:21.454760 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="registry-server" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.454770 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="registry-server" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.454971 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="137d7e14-9d53-450f-90ed-3c1ed4e6aed2" containerName="registry-server" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.456552 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.472034 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.515717 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.515771 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpklb\" (UniqueName: \"kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.515814 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.616661 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.616721 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpklb\" (UniqueName: \"kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.616764 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.617435 
4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.617520 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.648090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpklb\" (UniqueName: \"kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb\") pod \"redhat-marketplace-rdcm7\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:21 crc kubenswrapper[4876]: I1215 07:36:21.792918 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:22 crc kubenswrapper[4876]: I1215 07:36:22.263566 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:22 crc kubenswrapper[4876]: W1215 07:36:22.274248 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23ed64ed_125d_4e67_a65b_532ff03ee01a.slice/crio-e9c091044a87c2dc0ca5721bedbb5a4c51e46e5ebe1f5b227ce6866add77393b WatchSource:0}: Error finding container e9c091044a87c2dc0ca5721bedbb5a4c51e46e5ebe1f5b227ce6866add77393b: Status 404 returned error can't find the container with id e9c091044a87c2dc0ca5721bedbb5a4c51e46e5ebe1f5b227ce6866add77393b Dec 15 07:36:22 crc kubenswrapper[4876]: I1215 07:36:22.290521 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerStarted","Data":"e9c091044a87c2dc0ca5721bedbb5a4c51e46e5ebe1f5b227ce6866add77393b"} Dec 15 07:36:23 crc kubenswrapper[4876]: I1215 07:36:23.301670 4876 generic.go:334] "Generic (PLEG): container finished" podID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerID="130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960" exitCode=0 Dec 15 07:36:23 crc kubenswrapper[4876]: I1215 07:36:23.301807 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerDied","Data":"130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960"} Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.231629 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.241610 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.241663 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.264154 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.264207 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.264287 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86xzp\" (UniqueName: \"kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.315853 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerStarted","Data":"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2"} Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.365221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86xzp\" (UniqueName: \"kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.365306 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.365327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.365761 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.365829 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.397893 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86xzp\" (UniqueName: \"kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp\") pod \"redhat-operators-b9hcv\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:24 crc kubenswrapper[4876]: I1215 07:36:24.577518 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.011779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.323541 4876 generic.go:334] "Generic (PLEG): container finished" podID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerID="545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2" exitCode=0 Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.323627 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerDied","Data":"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2"} Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.325728 4876 generic.go:334] "Generic (PLEG): container finished" podID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerID="9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a" exitCode=0 Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.325759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerDied","Data":"9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a"} Dec 15 07:36:25 crc kubenswrapper[4876]: I1215 07:36:25.325819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerStarted","Data":"3a659a711c307649a308040818ee455f704de860b6f35c8f6deaff9a394cd6e7"} Dec 15 07:36:26 crc kubenswrapper[4876]: I1215 07:36:26.334942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerStarted","Data":"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a"} Dec 15 07:36:26 crc kubenswrapper[4876]: I1215 07:36:26.337395 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerStarted","Data":"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396"} Dec 15 07:36:26 crc kubenswrapper[4876]: I1215 07:36:26.355389 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rdcm7" podStartSLOduration=2.6590834020000003 podStartE2EDuration="5.35536961s" podCreationTimestamp="2025-12-15 07:36:21 +0000 UTC" firstStartedPulling="2025-12-15 07:36:23.304063905 +0000 UTC m=+2708.875206836" lastFinishedPulling="2025-12-15 07:36:26.000350133 +0000 UTC 
m=+2711.571493044" observedRunningTime="2025-12-15 07:36:26.353329364 +0000 UTC m=+2711.924472285" watchObservedRunningTime="2025-12-15 07:36:26.35536961 +0000 UTC m=+2711.926512521" Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.322289 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.322643 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.322704 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.323462 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.323549 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e" gracePeriod=600 Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.345950 4876 generic.go:334] "Generic (PLEG): container finished" podID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerID="68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396" exitCode=0 Dec 15 07:36:27 crc kubenswrapper[4876]: I1215 07:36:27.347055 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerDied","Data":"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396"} Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.358816 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e" exitCode=0 Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.361268 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e"} Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.361446 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b"} Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.361641 4876 scope.go:117] 
"RemoveContainer" containerID="5757ec0d352adfcf3fc66b8903b81ac515617e124af5750775f204a86df160ae" Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.367813 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerStarted","Data":"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36"} Dec 15 07:36:28 crc kubenswrapper[4876]: I1215 07:36:28.410411 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b9hcv" podStartSLOduration=1.870628268 podStartE2EDuration="4.410391042s" podCreationTimestamp="2025-12-15 07:36:24 +0000 UTC" firstStartedPulling="2025-12-15 07:36:25.327447373 +0000 UTC m=+2710.898590284" lastFinishedPulling="2025-12-15 07:36:27.867210157 +0000 UTC m=+2713.438353058" observedRunningTime="2025-12-15 07:36:28.408927882 +0000 UTC m=+2713.980070803" watchObservedRunningTime="2025-12-15 07:36:28.410391042 +0000 UTC m=+2713.981533953" Dec 15 07:36:31 crc kubenswrapper[4876]: I1215 07:36:31.794076 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:31 crc kubenswrapper[4876]: I1215 07:36:31.794597 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:31 crc kubenswrapper[4876]: I1215 07:36:31.850666 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:32 crc kubenswrapper[4876]: I1215 07:36:32.474669 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:33 crc kubenswrapper[4876]: I1215 07:36:33.013084 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:34 crc kubenswrapper[4876]: I1215 07:36:34.412276 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rdcm7" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="registry-server" containerID="cri-o://099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a" gracePeriod=2 Dec 15 07:36:34 crc kubenswrapper[4876]: I1215 07:36:34.578905 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:34 crc kubenswrapper[4876]: I1215 07:36:34.579433 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:34 crc kubenswrapper[4876]: I1215 07:36:34.622085 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.357211 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.423261 4876 generic.go:334] "Generic (PLEG): container finished" podID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerID="099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a" exitCode=0 Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.424301 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rdcm7" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.424291 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerDied","Data":"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a"} Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.424610 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rdcm7" event={"ID":"23ed64ed-125d-4e67-a65b-532ff03ee01a","Type":"ContainerDied","Data":"e9c091044a87c2dc0ca5721bedbb5a4c51e46e5ebe1f5b227ce6866add77393b"} Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.424632 4876 scope.go:117] "RemoveContainer" containerID="099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.449090 4876 scope.go:117] "RemoveContainer" containerID="545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.474547 4876 scope.go:117] "RemoveContainer" containerID="130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.476458 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.501081 4876 scope.go:117] "RemoveContainer" containerID="099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a" Dec 15 07:36:35 crc kubenswrapper[4876]: E1215 07:36:35.501813 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a\": container with ID starting with 099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a not found: ID does not exist" containerID="099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.501876 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a"} err="failed to get container status \"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a\": rpc error: code = NotFound desc = could not find container \"099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a\": container with ID starting with 099864ba7a05c7801bd33fb851d11802439a58f7e1dd55c1505527669d6bc01a not found: ID does not exist" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.501920 4876 scope.go:117] "RemoveContainer" containerID="545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2" Dec 15 07:36:35 crc kubenswrapper[4876]: E1215 07:36:35.502230 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2\": container with ID starting with 545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2 not found: ID does not exist" containerID="545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.502258 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2"} 
err="failed to get container status \"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2\": rpc error: code = NotFound desc = could not find container \"545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2\": container with ID starting with 545f0f5f76f7a41b3e7f937c63184e0a6e61a6c83b6951cea669c8c820b43da2 not found: ID does not exist" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.502278 4876 scope.go:117] "RemoveContainer" containerID="130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960" Dec 15 07:36:35 crc kubenswrapper[4876]: E1215 07:36:35.502641 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960\": container with ID starting with 130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960 not found: ID does not exist" containerID="130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.502708 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960"} err="failed to get container status \"130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960\": rpc error: code = NotFound desc = could not find container \"130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960\": container with ID starting with 130f78088445e03fd1b606cdd48fac4b5f0b1c07962d1103f92412d58f113960 not found: ID does not exist" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.543260 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content\") pod \"23ed64ed-125d-4e67-a65b-532ff03ee01a\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.543437 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpklb\" (UniqueName: \"kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb\") pod \"23ed64ed-125d-4e67-a65b-532ff03ee01a\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.543464 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities\") pod \"23ed64ed-125d-4e67-a65b-532ff03ee01a\" (UID: \"23ed64ed-125d-4e67-a65b-532ff03ee01a\") " Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.544441 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities" (OuterVolumeSpecName: "utilities") pod "23ed64ed-125d-4e67-a65b-532ff03ee01a" (UID: "23ed64ed-125d-4e67-a65b-532ff03ee01a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.551272 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb" (OuterVolumeSpecName: "kube-api-access-vpklb") pod "23ed64ed-125d-4e67-a65b-532ff03ee01a" (UID: "23ed64ed-125d-4e67-a65b-532ff03ee01a"). InnerVolumeSpecName "kube-api-access-vpklb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.582391 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23ed64ed-125d-4e67-a65b-532ff03ee01a" (UID: "23ed64ed-125d-4e67-a65b-532ff03ee01a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.644954 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpklb\" (UniqueName: \"kubernetes.io/projected/23ed64ed-125d-4e67-a65b-532ff03ee01a-kube-api-access-vpklb\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.645013 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.645032 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23ed64ed-125d-4e67-a65b-532ff03ee01a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.765831 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:35 crc kubenswrapper[4876]: I1215 07:36:35.774605 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rdcm7"] Dec 15 07:36:36 crc kubenswrapper[4876]: I1215 07:36:36.722400 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" path="/var/lib/kubelet/pods/23ed64ed-125d-4e67-a65b-532ff03ee01a/volumes" Dec 15 07:36:37 crc kubenswrapper[4876]: I1215 07:36:37.841914 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:37 crc kubenswrapper[4876]: I1215 07:36:37.845820 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b9hcv" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="registry-server" containerID="cri-o://f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36" gracePeriod=2 Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.340865 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.448893 4876 generic.go:334] "Generic (PLEG): container finished" podID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerID="f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36" exitCode=0 Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.449000 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b9hcv" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.448995 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerDied","Data":"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36"} Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.449358 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9hcv" event={"ID":"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e","Type":"ContainerDied","Data":"3a659a711c307649a308040818ee455f704de860b6f35c8f6deaff9a394cd6e7"} Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.449378 4876 scope.go:117] "RemoveContainer" containerID="f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.466040 4876 scope.go:117] "RemoveContainer" containerID="68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.478408 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86xzp\" (UniqueName: \"kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp\") pod \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.478451 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content\") pod \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.478520 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities\") pod \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\" (UID: \"fa2f9e7e-8d3e-4f93-b96a-241a878dea5e\") " Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.479702 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities" (OuterVolumeSpecName: "utilities") pod "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" (UID: "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.482959 4876 scope.go:117] "RemoveContainer" containerID="9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.484441 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp" (OuterVolumeSpecName: "kube-api-access-86xzp") pod "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" (UID: "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e"). InnerVolumeSpecName "kube-api-access-86xzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.545566 4876 scope.go:117] "RemoveContainer" containerID="f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36" Dec 15 07:36:38 crc kubenswrapper[4876]: E1215 07:36:38.546001 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36\": container with ID starting with f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36 not found: ID does not exist" containerID="f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.546064 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36"} err="failed to get container status \"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36\": rpc error: code = NotFound desc = could not find container \"f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36\": container with ID starting with f97d07baedac83ea8a2af8077a7942678cea31cf7bed04d571d699000fe72f36 not found: ID does not exist" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.546087 4876 scope.go:117] "RemoveContainer" containerID="68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396" Dec 15 07:36:38 crc kubenswrapper[4876]: E1215 07:36:38.546446 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396\": container with ID starting with 68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396 not found: ID does not exist" containerID="68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.546468 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396"} err="failed to get container status \"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396\": rpc error: code = NotFound desc = could not find container \"68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396\": container with ID starting with 68e98f6ee3030abde13eece2cb842c8c62c3f068418c66f53a94ad4e0a389396 not found: ID does not exist" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.546481 4876 scope.go:117] "RemoveContainer" containerID="9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a" Dec 15 07:36:38 crc kubenswrapper[4876]: E1215 07:36:38.546689 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a\": container with ID starting with 9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a not found: ID does not exist" containerID="9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.546714 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a"} err="failed to get container status \"9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a\": rpc error: code = NotFound desc = could not 
find container \"9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a\": container with ID starting with 9ca8c838f4a9e9c6ec58b3ce1f11f816491c99cc36008798f2a30de888adb35a not found: ID does not exist" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.581053 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86xzp\" (UniqueName: \"kubernetes.io/projected/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-kube-api-access-86xzp\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.581087 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.605603 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" (UID: "fa2f9e7e-8d3e-4f93-b96a-241a878dea5e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.682138 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.775094 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:38 crc kubenswrapper[4876]: I1215 07:36:38.781023 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b9hcv"] Dec 15 07:36:40 crc kubenswrapper[4876]: I1215 07:36:40.723847 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" path="/var/lib/kubelet/pods/fa2f9e7e-8d3e-4f93-b96a-241a878dea5e/volumes" Dec 15 07:38:27 crc kubenswrapper[4876]: I1215 07:38:27.322540 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:38:27 crc kubenswrapper[4876]: I1215 07:38:27.323406 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.506681 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507584 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="extract-content" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507600 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="extract-content" Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507613 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" 
containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507620 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507632 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="extract-utilities" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507639 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="extract-utilities" Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507663 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="extract-content" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507671 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="extract-content" Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507694 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507701 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: E1215 07:38:37.507715 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="extract-utilities" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507723 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="extract-utilities" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507892 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="23ed64ed-125d-4e67-a65b-532ff03ee01a" containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.507911 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa2f9e7e-8d3e-4f93-b96a-241a878dea5e" containerName="registry-server" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.509059 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.578341 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.630931 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.631427 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.631477 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg6kj\" (UniqueName: \"kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.733727 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.733812 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg6kj\" (UniqueName: \"kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.733880 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.734693 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.734753 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.757823 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jg6kj\" (UniqueName: \"kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj\") pod \"certified-operators-c4c7p\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:37 crc kubenswrapper[4876]: I1215 07:38:37.833725 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:38 crc kubenswrapper[4876]: I1215 07:38:38.329528 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:38 crc kubenswrapper[4876]: I1215 07:38:38.391985 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerStarted","Data":"9305c2055b44ce7cf1228e49508eabf91ec9a9e61b3dfbbe00cd1bdd1bfc7c59"} Dec 15 07:38:39 crc kubenswrapper[4876]: I1215 07:38:39.402602 4876 generic.go:334] "Generic (PLEG): container finished" podID="a856ef89-b3d4-4773-9905-8e629ae460db" containerID="d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe" exitCode=0 Dec 15 07:38:39 crc kubenswrapper[4876]: I1215 07:38:39.402672 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerDied","Data":"d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe"} Dec 15 07:38:39 crc kubenswrapper[4876]: I1215 07:38:39.406652 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:38:40 crc kubenswrapper[4876]: I1215 07:38:40.416293 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerStarted","Data":"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d"} Dec 15 07:38:41 crc kubenswrapper[4876]: I1215 07:38:41.422559 4876 generic.go:334] "Generic (PLEG): container finished" podID="a856ef89-b3d4-4773-9905-8e629ae460db" containerID="8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d" exitCode=0 Dec 15 07:38:41 crc kubenswrapper[4876]: I1215 07:38:41.422641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerDied","Data":"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d"} Dec 15 07:38:42 crc kubenswrapper[4876]: I1215 07:38:42.431444 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerStarted","Data":"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253"} Dec 15 07:38:42 crc kubenswrapper[4876]: I1215 07:38:42.478192 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c4c7p" podStartSLOduration=3.02587404 podStartE2EDuration="5.478174794s" podCreationTimestamp="2025-12-15 07:38:37 +0000 UTC" firstStartedPulling="2025-12-15 07:38:39.406322339 +0000 UTC m=+2844.977465250" lastFinishedPulling="2025-12-15 07:38:41.858623053 +0000 UTC m=+2847.429766004" observedRunningTime="2025-12-15 07:38:42.477501086 +0000 UTC m=+2848.048644007" watchObservedRunningTime="2025-12-15 
07:38:42.478174794 +0000 UTC m=+2848.049317735" Dec 15 07:38:47 crc kubenswrapper[4876]: I1215 07:38:47.834147 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:47 crc kubenswrapper[4876]: I1215 07:38:47.835304 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:47 crc kubenswrapper[4876]: I1215 07:38:47.908267 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:48 crc kubenswrapper[4876]: I1215 07:38:48.516143 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:48 crc kubenswrapper[4876]: I1215 07:38:48.561273 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:50 crc kubenswrapper[4876]: I1215 07:38:50.488217 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c4c7p" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="registry-server" containerID="cri-o://9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253" gracePeriod=2 Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.445510 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.501297 4876 generic.go:334] "Generic (PLEG): container finished" podID="a856ef89-b3d4-4773-9905-8e629ae460db" containerID="9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253" exitCode=0 Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.501336 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerDied","Data":"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253"} Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.501355 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c4c7p" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.501371 4876 scope.go:117] "RemoveContainer" containerID="9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.501359 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4c7p" event={"ID":"a856ef89-b3d4-4773-9905-8e629ae460db","Type":"ContainerDied","Data":"9305c2055b44ce7cf1228e49508eabf91ec9a9e61b3dfbbe00cd1bdd1bfc7c59"} Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.519725 4876 scope.go:117] "RemoveContainer" containerID="8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.549950 4876 scope.go:117] "RemoveContainer" containerID="d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.554322 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg6kj\" (UniqueName: \"kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj\") pod \"a856ef89-b3d4-4773-9905-8e629ae460db\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.554426 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities\") pod \"a856ef89-b3d4-4773-9905-8e629ae460db\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.554497 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content\") pod \"a856ef89-b3d4-4773-9905-8e629ae460db\" (UID: \"a856ef89-b3d4-4773-9905-8e629ae460db\") " Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.555334 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities" (OuterVolumeSpecName: "utilities") pod "a856ef89-b3d4-4773-9905-8e629ae460db" (UID: "a856ef89-b3d4-4773-9905-8e629ae460db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.560510 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj" (OuterVolumeSpecName: "kube-api-access-jg6kj") pod "a856ef89-b3d4-4773-9905-8e629ae460db" (UID: "a856ef89-b3d4-4773-9905-8e629ae460db"). InnerVolumeSpecName "kube-api-access-jg6kj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.566798 4876 scope.go:117] "RemoveContainer" containerID="9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253" Dec 15 07:38:51 crc kubenswrapper[4876]: E1215 07:38:51.568061 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253\": container with ID starting with 9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253 not found: ID does not exist" containerID="9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.568090 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253"} err="failed to get container status \"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253\": rpc error: code = NotFound desc = could not find container \"9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253\": container with ID starting with 9773840e1c849f568f585cc349ccfd3d35983217675ebecefd58a950c27b6253 not found: ID does not exist" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.568126 4876 scope.go:117] "RemoveContainer" containerID="8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d" Dec 15 07:38:51 crc kubenswrapper[4876]: E1215 07:38:51.568436 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d\": container with ID starting with 8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d not found: ID does not exist" containerID="8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.568456 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d"} err="failed to get container status \"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d\": rpc error: code = NotFound desc = could not find container \"8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d\": container with ID starting with 8f573e8162269568295c3ef84c65f1feba2ca35ba44f5889fd4d39cbdc82704d not found: ID does not exist" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.568470 4876 scope.go:117] "RemoveContainer" containerID="d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe" Dec 15 07:38:51 crc kubenswrapper[4876]: E1215 07:38:51.568708 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe\": container with ID starting with d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe not found: ID does not exist" containerID="d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.568730 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe"} err="failed to get container status \"d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe\": rpc error: code = NotFound desc = could not 
find container \"d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe\": container with ID starting with d0f86370a9a007b8a8f1374db05ac1da62f0e89d2656e12ac256d0a2288d3bbe not found: ID does not exist" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.627565 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a856ef89-b3d4-4773-9905-8e629ae460db" (UID: "a856ef89-b3d4-4773-9905-8e629ae460db"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.656821 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.656851 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a856ef89-b3d4-4773-9905-8e629ae460db-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.656862 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg6kj\" (UniqueName: \"kubernetes.io/projected/a856ef89-b3d4-4773-9905-8e629ae460db-kube-api-access-jg6kj\") on node \"crc\" DevicePath \"\"" Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.850793 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:51 crc kubenswrapper[4876]: I1215 07:38:51.857130 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c4c7p"] Dec 15 07:38:52 crc kubenswrapper[4876]: I1215 07:38:52.718810 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" path="/var/lib/kubelet/pods/a856ef89-b3d4-4773-9905-8e629ae460db/volumes" Dec 15 07:38:57 crc kubenswrapper[4876]: I1215 07:38:57.322796 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:38:57 crc kubenswrapper[4876]: I1215 07:38:57.323785 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.323293 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.324365 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.324505 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.325274 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.325356 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" gracePeriod=600 Dec 15 07:39:27 crc kubenswrapper[4876]: E1215 07:39:27.474379 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.840706 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" exitCode=0 Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.840751 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b"} Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.840816 4876 scope.go:117] "RemoveContainer" containerID="a4dde018a5df171a89b4d2936e77ded8d335ffd3cc641e22bc483b9f53c3460e" Dec 15 07:39:27 crc kubenswrapper[4876]: I1215 07:39:27.841414 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:39:27 crc kubenswrapper[4876]: E1215 07:39:27.841762 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:39:39 crc kubenswrapper[4876]: I1215 07:39:39.705065 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:39:39 crc kubenswrapper[4876]: E1215 07:39:39.705825 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:39:50 crc kubenswrapper[4876]: I1215 07:39:50.706362 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:39:50 crc kubenswrapper[4876]: E1215 07:39:50.708248 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:40:03 crc kubenswrapper[4876]: I1215 07:40:03.705656 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:40:03 crc kubenswrapper[4876]: E1215 07:40:03.706691 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:40:15 crc kubenswrapper[4876]: I1215 07:40:15.705623 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:40:15 crc kubenswrapper[4876]: E1215 07:40:15.706472 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:40:27 crc kubenswrapper[4876]: I1215 07:40:27.705447 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:40:27 crc kubenswrapper[4876]: E1215 07:40:27.706225 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:40:38 crc kubenswrapper[4876]: I1215 07:40:38.705750 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:40:38 crc kubenswrapper[4876]: E1215 07:40:38.706399 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:40:49 crc kubenswrapper[4876]: I1215 07:40:49.705770 4876 
scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:40:49 crc kubenswrapper[4876]: E1215 07:40:49.706968 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:41:04 crc kubenswrapper[4876]: I1215 07:41:04.714729 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:41:04 crc kubenswrapper[4876]: E1215 07:41:04.715829 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:41:18 crc kubenswrapper[4876]: I1215 07:41:18.753667 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:41:18 crc kubenswrapper[4876]: E1215 07:41:18.754444 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:41:32 crc kubenswrapper[4876]: I1215 07:41:32.706731 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:41:32 crc kubenswrapper[4876]: E1215 07:41:32.707952 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:41:45 crc kubenswrapper[4876]: I1215 07:41:45.705512 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:41:45 crc kubenswrapper[4876]: E1215 07:41:45.706394 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:41:56 crc kubenswrapper[4876]: I1215 07:41:56.706162 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:41:56 crc kubenswrapper[4876]: E1215 07:41:56.707636 4876 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:42:09 crc kubenswrapper[4876]: I1215 07:42:09.705512 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:42:09 crc kubenswrapper[4876]: E1215 07:42:09.706373 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:42:23 crc kubenswrapper[4876]: I1215 07:42:23.706473 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:42:23 crc kubenswrapper[4876]: E1215 07:42:23.707564 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:42:34 crc kubenswrapper[4876]: I1215 07:42:34.714539 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:42:34 crc kubenswrapper[4876]: E1215 07:42:34.715574 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:42:48 crc kubenswrapper[4876]: I1215 07:42:48.706458 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:42:48 crc kubenswrapper[4876]: E1215 07:42:48.707547 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:01 crc kubenswrapper[4876]: I1215 07:43:01.706447 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:43:01 crc kubenswrapper[4876]: E1215 07:43:01.707372 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:14 crc kubenswrapper[4876]: I1215 07:43:14.708614 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:43:14 crc kubenswrapper[4876]: E1215 07:43:14.709292 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:29 crc kubenswrapper[4876]: I1215 07:43:29.705945 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:43:29 crc kubenswrapper[4876]: E1215 07:43:29.706860 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:41 crc kubenswrapper[4876]: I1215 07:43:41.705788 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:43:41 crc kubenswrapper[4876]: E1215 07:43:41.706655 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.418975 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:43:52 crc kubenswrapper[4876]: E1215 07:43:52.419980 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="extract-content" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.420002 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="extract-content" Dec 15 07:43:52 crc kubenswrapper[4876]: E1215 07:43:52.420042 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="extract-utilities" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.420054 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="extract-utilities" Dec 15 07:43:52 crc kubenswrapper[4876]: E1215 07:43:52.420079 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="registry-server" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.420091 4876 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="registry-server" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.420349 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a856ef89-b3d4-4773-9905-8e629ae460db" containerName="registry-server" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.421883 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.429749 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.430256 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.430483 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-998ps\" (UniqueName: \"kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.450141 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.532327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.532393 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-998ps\" (UniqueName: \"kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.532430 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.532999 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.533507 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.565182 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-998ps\" (UniqueName: \"kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps\") pod \"community-operators-fqt9q\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:52 crc kubenswrapper[4876]: I1215 07:43:52.764819 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:43:53 crc kubenswrapper[4876]: I1215 07:43:53.077824 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:43:53 crc kubenswrapper[4876]: I1215 07:43:53.996721 4876 generic.go:334] "Generic (PLEG): container finished" podID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerID="79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850" exitCode=0 Dec 15 07:43:53 crc kubenswrapper[4876]: I1215 07:43:53.996765 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerDied","Data":"79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850"} Dec 15 07:43:53 crc kubenswrapper[4876]: I1215 07:43:53.996790 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerStarted","Data":"b5794af0ededf78e5b37fbee882c082d5ba4ba4fa4f8e35dc743a5e699657c61"} Dec 15 07:43:54 crc kubenswrapper[4876]: I1215 07:43:54.000169 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:43:54 crc kubenswrapper[4876]: I1215 07:43:54.710761 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:43:54 crc kubenswrapper[4876]: E1215 07:43:54.711348 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:43:55 crc kubenswrapper[4876]: I1215 07:43:55.006004 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerStarted","Data":"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5"} Dec 15 07:43:56 crc kubenswrapper[4876]: I1215 07:43:56.014509 4876 generic.go:334] "Generic (PLEG): container finished" podID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerID="00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5" exitCode=0 Dec 15 07:43:56 crc kubenswrapper[4876]: I1215 07:43:56.014804 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerDied","Data":"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5"} Dec 15 07:43:57 crc kubenswrapper[4876]: I1215 07:43:57.022891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerStarted","Data":"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6"} Dec 15 07:43:57 crc kubenswrapper[4876]: I1215 07:43:57.040069 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fqt9q" podStartSLOduration=2.550721993 podStartE2EDuration="5.040050113s" podCreationTimestamp="2025-12-15 07:43:52 +0000 UTC" firstStartedPulling="2025-12-15 07:43:53.999868489 +0000 UTC m=+3159.571011390" lastFinishedPulling="2025-12-15 07:43:56.489196589 +0000 UTC m=+3162.060339510" observedRunningTime="2025-12-15 07:43:57.039455267 +0000 UTC m=+3162.610598208" watchObservedRunningTime="2025-12-15 07:43:57.040050113 +0000 UTC m=+3162.611193024" Dec 15 07:44:02 crc kubenswrapper[4876]: I1215 07:44:02.766526 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:02 crc kubenswrapper[4876]: I1215 07:44:02.766909 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:02 crc kubenswrapper[4876]: I1215 07:44:02.810134 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:03 crc kubenswrapper[4876]: I1215 07:44:03.099247 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:03 crc kubenswrapper[4876]: I1215 07:44:03.144915 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:44:05 crc kubenswrapper[4876]: I1215 07:44:05.072369 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fqt9q" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="registry-server" containerID="cri-o://4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6" gracePeriod=2 Dec 15 07:44:05 crc kubenswrapper[4876]: I1215 07:44:05.951304 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.079294 4876 generic.go:334] "Generic (PLEG): container finished" podID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerID="4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6" exitCode=0 Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.079340 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerDied","Data":"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6"} Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.079372 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fqt9q" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.079387 4876 scope.go:117] "RemoveContainer" containerID="4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.079374 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqt9q" event={"ID":"a88514aa-38ba-4bed-a40e-c7afc1b24c7b","Type":"ContainerDied","Data":"b5794af0ededf78e5b37fbee882c082d5ba4ba4fa4f8e35dc743a5e699657c61"} Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.102819 4876 scope.go:117] "RemoveContainer" containerID="00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.119273 4876 scope.go:117] "RemoveContainer" containerID="79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.124690 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content\") pod \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.124786 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities\") pod \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.124846 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-998ps\" (UniqueName: \"kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps\") pod \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\" (UID: \"a88514aa-38ba-4bed-a40e-c7afc1b24c7b\") " Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.126427 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities" (OuterVolumeSpecName: "utilities") pod "a88514aa-38ba-4bed-a40e-c7afc1b24c7b" (UID: "a88514aa-38ba-4bed-a40e-c7afc1b24c7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.131857 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps" (OuterVolumeSpecName: "kube-api-access-998ps") pod "a88514aa-38ba-4bed-a40e-c7afc1b24c7b" (UID: "a88514aa-38ba-4bed-a40e-c7afc1b24c7b"). InnerVolumeSpecName "kube-api-access-998ps". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.167952 4876 scope.go:117] "RemoveContainer" containerID="4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6" Dec 15 07:44:06 crc kubenswrapper[4876]: E1215 07:44:06.168428 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6\": container with ID starting with 4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6 not found: ID does not exist" containerID="4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.168481 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6"} err="failed to get container status \"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6\": rpc error: code = NotFound desc = could not find container \"4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6\": container with ID starting with 4d68d90ff4fa90a2542bb2cedff58be4a9b5fc398a1697e9db891103684eebe6 not found: ID does not exist" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.168512 4876 scope.go:117] "RemoveContainer" containerID="00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5" Dec 15 07:44:06 crc kubenswrapper[4876]: E1215 07:44:06.169081 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5\": container with ID starting with 00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5 not found: ID does not exist" containerID="00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.169251 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5"} err="failed to get container status \"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5\": rpc error: code = NotFound desc = could not find container \"00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5\": container with ID starting with 00a3e8d2e7362b5a70a1995a8417112cf04d7582fa2ef64561af1191f46b10a5 not found: ID does not exist" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.169287 4876 scope.go:117] "RemoveContainer" containerID="79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850" Dec 15 07:44:06 crc kubenswrapper[4876]: E1215 07:44:06.169599 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850\": container with ID starting with 79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850 not found: ID does not exist" containerID="79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.169647 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850"} err="failed to get container status \"79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850\": rpc error: code = NotFound desc = could not 
find container \"79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850\": container with ID starting with 79d8c1710925297535261c22c1688c1b9c08cc0beea046d22efffe7f00708850 not found: ID does not exist" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.178601 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a88514aa-38ba-4bed-a40e-c7afc1b24c7b" (UID: "a88514aa-38ba-4bed-a40e-c7afc1b24c7b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.226578 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.226628 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.226642 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-998ps\" (UniqueName: \"kubernetes.io/projected/a88514aa-38ba-4bed-a40e-c7afc1b24c7b-kube-api-access-998ps\") on node \"crc\" DevicePath \"\"" Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.412471 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.419408 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fqt9q"] Dec 15 07:44:06 crc kubenswrapper[4876]: I1215 07:44:06.715504 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" path="/var/lib/kubelet/pods/a88514aa-38ba-4bed-a40e-c7afc1b24c7b/volumes" Dec 15 07:44:08 crc kubenswrapper[4876]: I1215 07:44:08.705972 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:44:08 crc kubenswrapper[4876]: E1215 07:44:08.706287 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:44:19 crc kubenswrapper[4876]: I1215 07:44:19.706313 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:44:19 crc kubenswrapper[4876]: E1215 07:44:19.707529 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:44:32 crc kubenswrapper[4876]: I1215 07:44:32.705758 4876 scope.go:117] "RemoveContainer" 
containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:44:33 crc kubenswrapper[4876]: I1215 07:44:33.306395 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d"} Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.143281 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq"] Dec 15 07:45:00 crc kubenswrapper[4876]: E1215 07:45:00.144197 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="extract-utilities" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.144213 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="extract-utilities" Dec 15 07:45:00 crc kubenswrapper[4876]: E1215 07:45:00.144227 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="registry-server" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.144235 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="registry-server" Dec 15 07:45:00 crc kubenswrapper[4876]: E1215 07:45:00.144253 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="extract-content" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.144263 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="extract-content" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.144408 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88514aa-38ba-4bed-a40e-c7afc1b24c7b" containerName="registry-server" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.144971 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.147340 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.147911 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.160280 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq"] Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.173929 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ws9g\" (UniqueName: \"kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.174019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.174063 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.275203 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ws9g\" (UniqueName: \"kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.275263 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.275298 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.276167 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume\") pod 
\"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.288673 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.291519 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ws9g\" (UniqueName: \"kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g\") pod \"collect-profiles-29429745-gp6xq\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.477318 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:00 crc kubenswrapper[4876]: I1215 07:45:00.919772 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq"] Dec 15 07:45:01 crc kubenswrapper[4876]: I1215 07:45:01.562843 4876 generic.go:334] "Generic (PLEG): container finished" podID="26fa769a-15b5-43c0-ac55-8bae8cd62876" containerID="3ee6972b27e1cf82f92a4324425ec4564dd6322f980fdc4da9af6dd2f57c03d6" exitCode=0 Dec 15 07:45:01 crc kubenswrapper[4876]: I1215 07:45:01.562930 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" event={"ID":"26fa769a-15b5-43c0-ac55-8bae8cd62876","Type":"ContainerDied","Data":"3ee6972b27e1cf82f92a4324425ec4564dd6322f980fdc4da9af6dd2f57c03d6"} Dec 15 07:45:01 crc kubenswrapper[4876]: I1215 07:45:01.563354 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" event={"ID":"26fa769a-15b5-43c0-ac55-8bae8cd62876","Type":"ContainerStarted","Data":"00e63a126fcc13482c6d5ddc71dc414b829767d7824cc6fb65ab96f755624057"} Dec 15 07:45:02 crc kubenswrapper[4876]: I1215 07:45:02.887035 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.026491 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume\") pod \"26fa769a-15b5-43c0-ac55-8bae8cd62876\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.026628 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ws9g\" (UniqueName: \"kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g\") pod \"26fa769a-15b5-43c0-ac55-8bae8cd62876\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.026780 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume\") pod \"26fa769a-15b5-43c0-ac55-8bae8cd62876\" (UID: \"26fa769a-15b5-43c0-ac55-8bae8cd62876\") " Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.027687 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume" (OuterVolumeSpecName: "config-volume") pod "26fa769a-15b5-43c0-ac55-8bae8cd62876" (UID: "26fa769a-15b5-43c0-ac55-8bae8cd62876"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.032386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g" (OuterVolumeSpecName: "kube-api-access-5ws9g") pod "26fa769a-15b5-43c0-ac55-8bae8cd62876" (UID: "26fa769a-15b5-43c0-ac55-8bae8cd62876"). InnerVolumeSpecName "kube-api-access-5ws9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.033788 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "26fa769a-15b5-43c0-ac55-8bae8cd62876" (UID: "26fa769a-15b5-43c0-ac55-8bae8cd62876"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.128654 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26fa769a-15b5-43c0-ac55-8bae8cd62876-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.128710 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ws9g\" (UniqueName: \"kubernetes.io/projected/26fa769a-15b5-43c0-ac55-8bae8cd62876-kube-api-access-5ws9g\") on node \"crc\" DevicePath \"\"" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.128731 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/26fa769a-15b5-43c0-ac55-8bae8cd62876-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.605445 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" event={"ID":"26fa769a-15b5-43c0-ac55-8bae8cd62876","Type":"ContainerDied","Data":"00e63a126fcc13482c6d5ddc71dc414b829767d7824cc6fb65ab96f755624057"} Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.605527 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00e63a126fcc13482c6d5ddc71dc414b829767d7824cc6fb65ab96f755624057" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.605620 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq" Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.976996 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f"] Dec 15 07:45:03 crc kubenswrapper[4876]: I1215 07:45:03.983561 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429700-7xb8f"] Dec 15 07:45:04 crc kubenswrapper[4876]: I1215 07:45:04.715701 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b" path="/var/lib/kubelet/pods/4f989a45-dfc6-45d5-9b8b-ec5f38dfbb4b/volumes" Dec 15 07:45:21 crc kubenswrapper[4876]: I1215 07:45:21.438177 4876 scope.go:117] "RemoveContainer" containerID="c0c7a2bd1592e42e3397d66f19835e9c9f94dbfdd2b1ee6f205615abf667c0bb" Dec 15 07:46:57 crc kubenswrapper[4876]: I1215 07:46:57.322386 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:46:57 crc kubenswrapper[4876]: I1215 07:46:57.324368 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.092614 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:14 crc kubenswrapper[4876]: E1215 07:47:14.093557 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26fa769a-15b5-43c0-ac55-8bae8cd62876" 
containerName="collect-profiles" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.093576 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="26fa769a-15b5-43c0-ac55-8bae8cd62876" containerName="collect-profiles" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.093747 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="26fa769a-15b5-43c0-ac55-8bae8cd62876" containerName="collect-profiles" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.094971 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.116595 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.259442 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnqdm\" (UniqueName: \"kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.259508 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.259533 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.360969 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnqdm\" (UniqueName: \"kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.361051 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.361095 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.361719 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " 
pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.361797 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.382292 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnqdm\" (UniqueName: \"kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm\") pod \"redhat-operators-zr9vh\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.417648 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:14 crc kubenswrapper[4876]: I1215 07:47:14.888324 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:15 crc kubenswrapper[4876]: I1215 07:47:15.610355 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerID="97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63" exitCode=0 Dec 15 07:47:15 crc kubenswrapper[4876]: I1215 07:47:15.610417 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerDied","Data":"97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63"} Dec 15 07:47:15 crc kubenswrapper[4876]: I1215 07:47:15.610662 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerStarted","Data":"5b3a390e810e561fcc2e13c876b2b68fa12fc369e9c4008c11a2fbc08d014801"} Dec 15 07:47:16 crc kubenswrapper[4876]: I1215 07:47:16.618647 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerStarted","Data":"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003"} Dec 15 07:47:17 crc kubenswrapper[4876]: I1215 07:47:17.629782 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerID="8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003" exitCode=0 Dec 15 07:47:17 crc kubenswrapper[4876]: I1215 07:47:17.629835 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerDied","Data":"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003"} Dec 15 07:47:18 crc kubenswrapper[4876]: I1215 07:47:18.642302 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerStarted","Data":"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b"} Dec 15 07:47:18 crc kubenswrapper[4876]: I1215 07:47:18.666875 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zr9vh" podStartSLOduration=2.237718293 podStartE2EDuration="4.666855846s" 
podCreationTimestamp="2025-12-15 07:47:14 +0000 UTC" firstStartedPulling="2025-12-15 07:47:15.611732776 +0000 UTC m=+3361.182875687" lastFinishedPulling="2025-12-15 07:47:18.040870329 +0000 UTC m=+3363.612013240" observedRunningTime="2025-12-15 07:47:18.660564545 +0000 UTC m=+3364.231707466" watchObservedRunningTime="2025-12-15 07:47:18.666855846 +0000 UTC m=+3364.237998777" Dec 15 07:47:24 crc kubenswrapper[4876]: I1215 07:47:24.418195 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:24 crc kubenswrapper[4876]: I1215 07:47:24.418811 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:24 crc kubenswrapper[4876]: I1215 07:47:24.457942 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:24 crc kubenswrapper[4876]: I1215 07:47:24.724802 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:24 crc kubenswrapper[4876]: I1215 07:47:24.787617 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:26 crc kubenswrapper[4876]: I1215 07:47:26.696213 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zr9vh" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="registry-server" containerID="cri-o://9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b" gracePeriod=2 Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.109751 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.198519 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities\") pod \"b2484752-4ceb-40a1-ac34-925fc21b461d\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.198608 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnqdm\" (UniqueName: \"kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm\") pod \"b2484752-4ceb-40a1-ac34-925fc21b461d\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.198718 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content\") pod \"b2484752-4ceb-40a1-ac34-925fc21b461d\" (UID: \"b2484752-4ceb-40a1-ac34-925fc21b461d\") " Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.199895 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities" (OuterVolumeSpecName: "utilities") pod "b2484752-4ceb-40a1-ac34-925fc21b461d" (UID: "b2484752-4ceb-40a1-ac34-925fc21b461d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.208492 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm" (OuterVolumeSpecName: "kube-api-access-lnqdm") pod "b2484752-4ceb-40a1-ac34-925fc21b461d" (UID: "b2484752-4ceb-40a1-ac34-925fc21b461d"). InnerVolumeSpecName "kube-api-access-lnqdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.301350 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.301411 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnqdm\" (UniqueName: \"kubernetes.io/projected/b2484752-4ceb-40a1-ac34-925fc21b461d-kube-api-access-lnqdm\") on node \"crc\" DevicePath \"\"" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.323519 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.323623 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.708625 4876 generic.go:334] "Generic (PLEG): container finished" podID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerID="9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b" exitCode=0 Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.708688 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerDied","Data":"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b"} Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.708726 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zr9vh" event={"ID":"b2484752-4ceb-40a1-ac34-925fc21b461d","Type":"ContainerDied","Data":"5b3a390e810e561fcc2e13c876b2b68fa12fc369e9c4008c11a2fbc08d014801"} Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.708759 4876 scope.go:117] "RemoveContainer" containerID="9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.708922 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zr9vh" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.741637 4876 scope.go:117] "RemoveContainer" containerID="8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.763786 4876 scope.go:117] "RemoveContainer" containerID="97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.794312 4876 scope.go:117] "RemoveContainer" containerID="9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b" Dec 15 07:47:27 crc kubenswrapper[4876]: E1215 07:47:27.794895 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b\": container with ID starting with 9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b not found: ID does not exist" containerID="9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.794926 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b"} err="failed to get container status \"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b\": rpc error: code = NotFound desc = could not find container \"9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b\": container with ID starting with 9db6c8eeb23935ea65e60ae595518329b913b88f2b60ed2ba489a3a6be84792b not found: ID does not exist" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.794946 4876 scope.go:117] "RemoveContainer" containerID="8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003" Dec 15 07:47:27 crc kubenswrapper[4876]: E1215 07:47:27.795801 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003\": container with ID starting with 8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003 not found: ID does not exist" containerID="8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.795821 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003"} err="failed to get container status \"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003\": rpc error: code = NotFound desc = could not find container \"8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003\": container with ID starting with 8fdb0e17faafbd52409f76ea92534d8b69d7c5c4953ecd48abd5f90797bc3003 not found: ID does not exist" Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.795832 4876 scope.go:117] "RemoveContainer" containerID="97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63" Dec 15 07:47:27 crc kubenswrapper[4876]: E1215 07:47:27.796177 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63\": container with ID starting with 97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63 not found: ID does not exist" containerID="97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63" 
Dec 15 07:47:27 crc kubenswrapper[4876]: I1215 07:47:27.796209 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63"} err="failed to get container status \"97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63\": rpc error: code = NotFound desc = could not find container \"97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63\": container with ID starting with 97ac2bb0d1ac328d35a28bd3bb5351fbc910bacc6a266779f221401e15f92d63 not found: ID does not exist" Dec 15 07:47:28 crc kubenswrapper[4876]: I1215 07:47:28.096708 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2484752-4ceb-40a1-ac34-925fc21b461d" (UID: "b2484752-4ceb-40a1-ac34-925fc21b461d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:47:28 crc kubenswrapper[4876]: I1215 07:47:28.115291 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2484752-4ceb-40a1-ac34-925fc21b461d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:47:28 crc kubenswrapper[4876]: I1215 07:47:28.353224 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:28 crc kubenswrapper[4876]: I1215 07:47:28.363388 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zr9vh"] Dec 15 07:47:28 crc kubenswrapper[4876]: I1215 07:47:28.722800 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" path="/var/lib/kubelet/pods/b2484752-4ceb-40a1-ac34-925fc21b461d/volumes" Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.322754 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.323731 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.323805 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.324407 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.324478 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" 
containerName="machine-config-daemon" containerID="cri-o://3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d" gracePeriod=600 Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.934084 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d" exitCode=0 Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.934192 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d"} Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.935039 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021"} Dec 15 07:47:57 crc kubenswrapper[4876]: I1215 07:47:57.935096 4876 scope.go:117] "RemoveContainer" containerID="4e2ff306b042836c5eeb884d8586b3ee1df33b61679b1b9b011d8f9d6cd4690b" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.606884 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:49:46 crc kubenswrapper[4876]: E1215 07:49:46.609225 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="extract-content" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.609357 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="extract-content" Dec 15 07:49:46 crc kubenswrapper[4876]: E1215 07:49:46.609444 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="extract-utilities" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.609514 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="extract-utilities" Dec 15 07:49:46 crc kubenswrapper[4876]: E1215 07:49:46.609582 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="registry-server" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.609648 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="registry-server" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.609891 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2484752-4ceb-40a1-ac34-925fc21b461d" containerName="registry-server" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.612910 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.627773 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.723409 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.723489 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.723529 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtlfl\" (UniqueName: \"kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.825339 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.825425 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.825455 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtlfl\" (UniqueName: \"kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.825958 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.826036 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.853414 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qtlfl\" (UniqueName: \"kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl\") pod \"certified-operators-qksv2\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:46 crc kubenswrapper[4876]: I1215 07:49:46.939077 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:47 crc kubenswrapper[4876]: I1215 07:49:47.434183 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:49:47 crc kubenswrapper[4876]: W1215 07:49:47.439293 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8fff210_1c33_49ef_8484_ba168851ad90.slice/crio-74095983a9ebe4cf72f5d939456c2ffefabaa6c691b03015365707aa82310a2d WatchSource:0}: Error finding container 74095983a9ebe4cf72f5d939456c2ffefabaa6c691b03015365707aa82310a2d: Status 404 returned error can't find the container with id 74095983a9ebe4cf72f5d939456c2ffefabaa6c691b03015365707aa82310a2d Dec 15 07:49:47 crc kubenswrapper[4876]: I1215 07:49:47.816851 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8fff210-1c33-49ef-8484-ba168851ad90" containerID="9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55" exitCode=0 Dec 15 07:49:47 crc kubenswrapper[4876]: I1215 07:49:47.817058 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerDied","Data":"9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55"} Dec 15 07:49:47 crc kubenswrapper[4876]: I1215 07:49:47.817240 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerStarted","Data":"74095983a9ebe4cf72f5d939456c2ffefabaa6c691b03015365707aa82310a2d"} Dec 15 07:49:47 crc kubenswrapper[4876]: I1215 07:49:47.818455 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:49:48 crc kubenswrapper[4876]: I1215 07:49:48.827247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerStarted","Data":"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a"} Dec 15 07:49:49 crc kubenswrapper[4876]: I1215 07:49:49.839461 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8fff210-1c33-49ef-8484-ba168851ad90" containerID="28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a" exitCode=0 Dec 15 07:49:49 crc kubenswrapper[4876]: I1215 07:49:49.839550 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerDied","Data":"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a"} Dec 15 07:49:50 crc kubenswrapper[4876]: I1215 07:49:50.848466 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerStarted","Data":"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528"} Dec 15 07:49:50 crc kubenswrapper[4876]: I1215 
07:49:50.886732 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qksv2" podStartSLOduration=2.204476143 podStartE2EDuration="4.88669856s" podCreationTimestamp="2025-12-15 07:49:46 +0000 UTC" firstStartedPulling="2025-12-15 07:49:47.818207482 +0000 UTC m=+3513.389350393" lastFinishedPulling="2025-12-15 07:49:50.500429899 +0000 UTC m=+3516.071572810" observedRunningTime="2025-12-15 07:49:50.880133501 +0000 UTC m=+3516.451276442" watchObservedRunningTime="2025-12-15 07:49:50.88669856 +0000 UTC m=+3516.457841521" Dec 15 07:49:56 crc kubenswrapper[4876]: I1215 07:49:56.940149 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:56 crc kubenswrapper[4876]: I1215 07:49:56.940209 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:57 crc kubenswrapper[4876]: I1215 07:49:57.014337 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:57 crc kubenswrapper[4876]: I1215 07:49:57.322780 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:49:57 crc kubenswrapper[4876]: I1215 07:49:57.323241 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:49:57 crc kubenswrapper[4876]: I1215 07:49:57.962857 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:49:58 crc kubenswrapper[4876]: I1215 07:49:58.015470 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:49:59 crc kubenswrapper[4876]: I1215 07:49:59.917314 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qksv2" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="registry-server" containerID="cri-o://fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528" gracePeriod=2 Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.782447 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.925537 4876 generic.go:334] "Generic (PLEG): container finished" podID="b8fff210-1c33-49ef-8484-ba168851ad90" containerID="fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528" exitCode=0 Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.925579 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerDied","Data":"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528"} Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.925604 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qksv2" event={"ID":"b8fff210-1c33-49ef-8484-ba168851ad90","Type":"ContainerDied","Data":"74095983a9ebe4cf72f5d939456c2ffefabaa6c691b03015365707aa82310a2d"} Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.925622 4876 scope.go:117] "RemoveContainer" containerID="fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.925764 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qksv2" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.945465 4876 scope.go:117] "RemoveContainer" containerID="28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.950510 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content\") pod \"b8fff210-1c33-49ef-8484-ba168851ad90\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.950539 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities\") pod \"b8fff210-1c33-49ef-8484-ba168851ad90\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.950561 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtlfl\" (UniqueName: \"kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl\") pod \"b8fff210-1c33-49ef-8484-ba168851ad90\" (UID: \"b8fff210-1c33-49ef-8484-ba168851ad90\") " Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.951887 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities" (OuterVolumeSpecName: "utilities") pod "b8fff210-1c33-49ef-8484-ba168851ad90" (UID: "b8fff210-1c33-49ef-8484-ba168851ad90"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.956584 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl" (OuterVolumeSpecName: "kube-api-access-qtlfl") pod "b8fff210-1c33-49ef-8484-ba168851ad90" (UID: "b8fff210-1c33-49ef-8484-ba168851ad90"). InnerVolumeSpecName "kube-api-access-qtlfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:50:00 crc kubenswrapper[4876]: I1215 07:50:00.980347 4876 scope.go:117] "RemoveContainer" containerID="9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.008933 4876 scope.go:117] "RemoveContainer" containerID="fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528" Dec 15 07:50:01 crc kubenswrapper[4876]: E1215 07:50:01.009953 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528\": container with ID starting with fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528 not found: ID does not exist" containerID="fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.010020 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528"} err="failed to get container status \"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528\": rpc error: code = NotFound desc = could not find container \"fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528\": container with ID starting with fb873c6eb8d46c2e7869c4b1951f22aaf9f8a35744cb5ebefc042d11270c8528 not found: ID does not exist" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.010054 4876 scope.go:117] "RemoveContainer" containerID="28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a" Dec 15 07:50:01 crc kubenswrapper[4876]: E1215 07:50:01.011746 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a\": container with ID starting with 28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a not found: ID does not exist" containerID="28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.011800 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a"} err="failed to get container status \"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a\": rpc error: code = NotFound desc = could not find container \"28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a\": container with ID starting with 28a8a1966dd44b565b6d1ec250c8a203eb0279b5c26a2860ac09e79a1b95015a not found: ID does not exist" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.011847 4876 scope.go:117] "RemoveContainer" containerID="9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55" Dec 15 07:50:01 crc kubenswrapper[4876]: E1215 07:50:01.012420 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55\": container with ID starting with 9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55 not found: ID does not exist" containerID="9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.012480 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55"} err="failed to get container status \"9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55\": rpc error: code = NotFound desc = could not find container \"9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55\": container with ID starting with 9cc56453b546988a7160a47fab93f955b2b2ced5cf930b4c589ec8eb5ab38a55 not found: ID does not exist" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.016926 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8fff210-1c33-49ef-8484-ba168851ad90" (UID: "b8fff210-1c33-49ef-8484-ba168851ad90"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.052320 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.052363 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8fff210-1c33-49ef-8484-ba168851ad90-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.052377 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtlfl\" (UniqueName: \"kubernetes.io/projected/b8fff210-1c33-49ef-8484-ba168851ad90-kube-api-access-qtlfl\") on node \"crc\" DevicePath \"\"" Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.265161 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:50:01 crc kubenswrapper[4876]: I1215 07:50:01.273940 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qksv2"] Dec 15 07:50:02 crc kubenswrapper[4876]: I1215 07:50:02.718308 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" path="/var/lib/kubelet/pods/b8fff210-1c33-49ef-8484-ba168851ad90/volumes" Dec 15 07:50:27 crc kubenswrapper[4876]: I1215 07:50:27.323359 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:50:27 crc kubenswrapper[4876]: I1215 07:50:27.323799 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.322895 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.323642 4876 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.323696 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.324405 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.324475 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" gracePeriod=600 Dec 15 07:50:57 crc kubenswrapper[4876]: E1215 07:50:57.469222 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.679977 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" exitCode=0 Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.680041 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021"} Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.680089 4876 scope.go:117] "RemoveContainer" containerID="3db4ab569f2408085c98313f82e791938c01c9f0bfb86f07b05f189b3572205d" Dec 15 07:50:57 crc kubenswrapper[4876]: I1215 07:50:57.680796 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:50:57 crc kubenswrapper[4876]: E1215 07:50:57.681202 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:51:10 crc kubenswrapper[4876]: I1215 07:51:10.705878 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:51:10 crc kubenswrapper[4876]: E1215 07:51:10.707249 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:51:21 crc kubenswrapper[4876]: I1215 07:51:21.705039 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:51:21 crc kubenswrapper[4876]: E1215 07:51:21.705626 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:51:35 crc kubenswrapper[4876]: I1215 07:51:35.706159 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:51:35 crc kubenswrapper[4876]: E1215 07:51:35.706916 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:51:47 crc kubenswrapper[4876]: I1215 07:51:47.705220 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:51:47 crc kubenswrapper[4876]: E1215 07:51:47.705874 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:52:02 crc kubenswrapper[4876]: I1215 07:52:02.713021 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:52:02 crc kubenswrapper[4876]: E1215 07:52:02.713896 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:52:15 crc kubenswrapper[4876]: I1215 07:52:15.705496 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:52:15 crc kubenswrapper[4876]: E1215 07:52:15.706563 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:52:26 crc kubenswrapper[4876]: I1215 07:52:26.705280 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:52:26 crc kubenswrapper[4876]: E1215 07:52:26.706255 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:52:41 crc kubenswrapper[4876]: I1215 07:52:41.706003 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:52:41 crc kubenswrapper[4876]: E1215 07:52:41.706875 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:52:52 crc kubenswrapper[4876]: I1215 07:52:52.705894 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:52:52 crc kubenswrapper[4876]: E1215 07:52:52.706673 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:53:06 crc kubenswrapper[4876]: I1215 07:53:06.704964 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:53:06 crc kubenswrapper[4876]: E1215 07:53:06.705753 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:53:21 crc kubenswrapper[4876]: I1215 07:53:21.706009 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:53:21 crc kubenswrapper[4876]: E1215 07:53:21.706828 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:53:32 crc kubenswrapper[4876]: I1215 07:53:32.706536 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:53:32 crc kubenswrapper[4876]: E1215 07:53:32.707468 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:53:44 crc kubenswrapper[4876]: I1215 07:53:44.711950 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:53:44 crc kubenswrapper[4876]: E1215 07:53:44.712886 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:53:57 crc kubenswrapper[4876]: I1215 07:53:57.705775 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:53:57 crc kubenswrapper[4876]: E1215 07:53:57.707022 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:54:09 crc kubenswrapper[4876]: I1215 07:54:09.705582 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:54:09 crc kubenswrapper[4876]: E1215 07:54:09.707065 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.638792 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:20 crc kubenswrapper[4876]: E1215 07:54:20.639813 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="extract-utilities" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.639829 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="extract-utilities" Dec 15 07:54:20 crc kubenswrapper[4876]: E1215 07:54:20.639848 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="extract-content" Dec 15 07:54:20 crc kubenswrapper[4876]: 
I1215 07:54:20.639858 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="extract-content" Dec 15 07:54:20 crc kubenswrapper[4876]: E1215 07:54:20.639886 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="registry-server" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.639895 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="registry-server" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.640065 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fff210-1c33-49ef-8484-ba168851ad90" containerName="registry-server" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.641697 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.658572 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.681875 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.681999 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.682199 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66sf4\" (UniqueName: \"kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.705258 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:54:20 crc kubenswrapper[4876]: E1215 07:54:20.705531 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.783569 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66sf4\" (UniqueName: \"kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.783683 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.783744 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.784946 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.785059 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.805170 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66sf4\" (UniqueName: \"kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4\") pod \"community-operators-zmwbb\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:20 crc kubenswrapper[4876]: I1215 07:54:20.973429 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:21 crc kubenswrapper[4876]: I1215 07:54:21.461188 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:21 crc kubenswrapper[4876]: W1215 07:54:21.462452 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58dfef19_1b56_489c_93d0_ea4550d68cb9.slice/crio-0f42e217a6f717e8b1ca3eada2b0a69ae3a693cb287756155078018c10557a64 WatchSource:0}: Error finding container 0f42e217a6f717e8b1ca3eada2b0a69ae3a693cb287756155078018c10557a64: Status 404 returned error can't find the container with id 0f42e217a6f717e8b1ca3eada2b0a69ae3a693cb287756155078018c10557a64 Dec 15 07:54:22 crc kubenswrapper[4876]: I1215 07:54:22.309572 4876 generic.go:334] "Generic (PLEG): container finished" podID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerID="00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5" exitCode=0 Dec 15 07:54:22 crc kubenswrapper[4876]: I1215 07:54:22.309641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerDied","Data":"00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5"} Dec 15 07:54:22 crc kubenswrapper[4876]: I1215 07:54:22.309865 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerStarted","Data":"0f42e217a6f717e8b1ca3eada2b0a69ae3a693cb287756155078018c10557a64"} Dec 15 07:54:23 crc kubenswrapper[4876]: I1215 07:54:23.320374 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerStarted","Data":"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94"} Dec 15 07:54:24 crc kubenswrapper[4876]: I1215 07:54:24.328670 4876 generic.go:334] "Generic (PLEG): container finished" podID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerID="46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94" exitCode=0 Dec 15 07:54:24 crc kubenswrapper[4876]: I1215 07:54:24.328717 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerDied","Data":"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94"} Dec 15 07:54:25 crc kubenswrapper[4876]: I1215 07:54:25.337635 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerStarted","Data":"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5"} Dec 15 07:54:25 crc kubenswrapper[4876]: I1215 07:54:25.355162 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zmwbb" podStartSLOduration=2.709736758 podStartE2EDuration="5.355140192s" podCreationTimestamp="2025-12-15 07:54:20 +0000 UTC" firstStartedPulling="2025-12-15 07:54:22.311151632 +0000 UTC m=+3787.882294553" lastFinishedPulling="2025-12-15 07:54:24.956555046 +0000 UTC m=+3790.527697987" observedRunningTime="2025-12-15 07:54:25.352454129 +0000 UTC m=+3790.923597070" watchObservedRunningTime="2025-12-15 07:54:25.355140192 +0000 UTC m=+3790.926283133" 
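The pod_startup_latency_tracker entry above reports both podStartE2EDuration="5.355140192s" and podStartSLOduration=2.709736758 for community-operators-zmwbb. The two figures are consistent with the SLO number excluding the image-pull window (the Kubernetes pod-startup SLI is defined without time spent pulling images); a quick check against the timestamps in the entry, using the monotonic m=+ values where given:

  E2E  = watchObservedRunningTime - podCreationTimestamp
       = 07:54:25.355140192 - 07:54:20.000000000 = 5.355140192 s
  pull = lastFinishedPulling - firstStartedPulling
       = m=+3790.527697987 - m=+3787.882294553   = 2.645403434 s
  SLO  = E2E - pull = 5.355140192 - 2.645403434   = 2.709736758 s

which matches the reported podStartSLOduration to the nanosecond.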
Dec 15 07:54:30 crc kubenswrapper[4876]: I1215 07:54:30.973626 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:30 crc kubenswrapper[4876]: I1215 07:54:30.973951 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:31 crc kubenswrapper[4876]: I1215 07:54:31.019406 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:31 crc kubenswrapper[4876]: I1215 07:54:31.445408 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:31 crc kubenswrapper[4876]: I1215 07:54:31.505302 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.406939 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zmwbb" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="registry-server" containerID="cri-o://a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5" gracePeriod=2 Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.780643 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.868877 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content\") pod \"58dfef19-1b56-489c-93d0-ea4550d68cb9\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.868965 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities\") pod \"58dfef19-1b56-489c-93d0-ea4550d68cb9\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.868998 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66sf4\" (UniqueName: \"kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4\") pod \"58dfef19-1b56-489c-93d0-ea4550d68cb9\" (UID: \"58dfef19-1b56-489c-93d0-ea4550d68cb9\") " Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.869892 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities" (OuterVolumeSpecName: "utilities") pod "58dfef19-1b56-489c-93d0-ea4550d68cb9" (UID: "58dfef19-1b56-489c-93d0-ea4550d68cb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.876657 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4" (OuterVolumeSpecName: "kube-api-access-66sf4") pod "58dfef19-1b56-489c-93d0-ea4550d68cb9" (UID: "58dfef19-1b56-489c-93d0-ea4550d68cb9"). InnerVolumeSpecName "kube-api-access-66sf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.971700 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:54:33 crc kubenswrapper[4876]: I1215 07:54:33.971753 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66sf4\" (UniqueName: \"kubernetes.io/projected/58dfef19-1b56-489c-93d0-ea4550d68cb9-kube-api-access-66sf4\") on node \"crc\" DevicePath \"\"" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.111948 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58dfef19-1b56-489c-93d0-ea4550d68cb9" (UID: "58dfef19-1b56-489c-93d0-ea4550d68cb9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.174745 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58dfef19-1b56-489c-93d0-ea4550d68cb9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.421162 4876 generic.go:334] "Generic (PLEG): container finished" podID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerID="a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5" exitCode=0 Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.421234 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerDied","Data":"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5"} Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.421275 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zmwbb" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.421291 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zmwbb" event={"ID":"58dfef19-1b56-489c-93d0-ea4550d68cb9","Type":"ContainerDied","Data":"0f42e217a6f717e8b1ca3eada2b0a69ae3a693cb287756155078018c10557a64"} Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.421332 4876 scope.go:117] "RemoveContainer" containerID="a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.468335 4876 scope.go:117] "RemoveContainer" containerID="46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.476529 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.487125 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zmwbb"] Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.497475 4876 scope.go:117] "RemoveContainer" containerID="00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.528580 4876 scope.go:117] "RemoveContainer" containerID="a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5" Dec 15 07:54:34 crc kubenswrapper[4876]: E1215 07:54:34.529890 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5\": container with ID starting with a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5 not found: ID does not exist" containerID="a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.529959 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5"} err="failed to get container status \"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5\": rpc error: code = NotFound desc = could not find container \"a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5\": container with ID starting with a39996a652dbf2fbbb199d0e421190ce1c036fe60b163e79d4bd4aaae28f98a5 not found: ID does not exist" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.530003 4876 scope.go:117] "RemoveContainer" containerID="46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94" Dec 15 07:54:34 crc kubenswrapper[4876]: E1215 07:54:34.530691 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94\": container with ID starting with 46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94 not found: ID does not exist" containerID="46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.530736 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94"} err="failed to get container status \"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94\": rpc error: code = NotFound desc = could not find 
container \"46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94\": container with ID starting with 46e3cf6f585cc122f861a3bcd94e531b275106ab0dde1188ac7a2fe5e3a98e94 not found: ID does not exist" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.530771 4876 scope.go:117] "RemoveContainer" containerID="00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5" Dec 15 07:54:34 crc kubenswrapper[4876]: E1215 07:54:34.531033 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5\": container with ID starting with 00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5 not found: ID does not exist" containerID="00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.531056 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5"} err="failed to get container status \"00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5\": rpc error: code = NotFound desc = could not find container \"00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5\": container with ID starting with 00654abfcb1e385e324e0ba4cb916d84bcecc38e03f6289c94e4294d7d2bc4e5 not found: ID does not exist" Dec 15 07:54:34 crc kubenswrapper[4876]: I1215 07:54:34.715155 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" path="/var/lib/kubelet/pods/58dfef19-1b56-489c-93d0-ea4550d68cb9/volumes" Dec 15 07:54:35 crc kubenswrapper[4876]: I1215 07:54:35.704909 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:54:35 crc kubenswrapper[4876]: E1215 07:54:35.705319 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:54:48 crc kubenswrapper[4876]: I1215 07:54:48.705837 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:54:48 crc kubenswrapper[4876]: E1215 07:54:48.707177 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:54:59 crc kubenswrapper[4876]: I1215 07:54:59.706496 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:54:59 crc kubenswrapper[4876]: E1215 07:54:59.707284 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:55:10 crc kubenswrapper[4876]: I1215 07:55:10.706148 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:55:10 crc kubenswrapper[4876]: E1215 07:55:10.707159 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:55:22 crc kubenswrapper[4876]: I1215 07:55:22.706740 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:55:22 crc kubenswrapper[4876]: E1215 07:55:22.708060 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:55:36 crc kubenswrapper[4876]: I1215 07:55:36.706027 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:55:36 crc kubenswrapper[4876]: E1215 07:55:36.708709 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:55:50 crc kubenswrapper[4876]: I1215 07:55:50.707497 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:55:50 crc kubenswrapper[4876]: E1215 07:55:50.708580 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.616673 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:55:59 crc kubenswrapper[4876]: E1215 07:55:59.620356 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="extract-utilities" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.620696 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="extract-utilities" Dec 15 07:55:59 crc kubenswrapper[4876]: E1215 07:55:59.620787 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="registry-server" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.620890 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="registry-server" Dec 15 07:55:59 crc kubenswrapper[4876]: E1215 07:55:59.620996 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="extract-content" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.621076 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="extract-content" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.621402 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="58dfef19-1b56-489c-93d0-ea4550d68cb9" containerName="registry-server" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.623058 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.632189 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.706930 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.706994 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.707090 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5s46\" (UniqueName: \"kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.808292 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5s46\" (UniqueName: \"kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.808418 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.808461 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.809279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.809514 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.832165 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5s46\" (UniqueName: \"kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46\") pod \"redhat-marketplace-6mdh8\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:55:59 crc kubenswrapper[4876]: I1215 07:55:59.949134 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:00 crc kubenswrapper[4876]: I1215 07:56:00.513820 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:56:01 crc kubenswrapper[4876]: I1215 07:56:01.101769 4876 generic.go:334] "Generic (PLEG): container finished" podID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerID="aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe" exitCode=0 Dec 15 07:56:01 crc kubenswrapper[4876]: I1215 07:56:01.102002 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerDied","Data":"aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe"} Dec 15 07:56:01 crc kubenswrapper[4876]: I1215 07:56:01.102259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerStarted","Data":"513cc44dd765f77b41e2b7db9b0260a406e8bc5276af53df6fccc454d9e02022"} Dec 15 07:56:01 crc kubenswrapper[4876]: I1215 07:56:01.104186 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 07:56:01 crc kubenswrapper[4876]: I1215 07:56:01.705057 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 07:56:02 crc kubenswrapper[4876]: I1215 07:56:02.108936 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1"} Dec 15 07:56:02 crc kubenswrapper[4876]: I1215 07:56:02.110283 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" 
event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerStarted","Data":"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981"} Dec 15 07:56:03 crc kubenswrapper[4876]: I1215 07:56:03.121177 4876 generic.go:334] "Generic (PLEG): container finished" podID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerID="6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981" exitCode=0 Dec 15 07:56:03 crc kubenswrapper[4876]: I1215 07:56:03.121529 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerDied","Data":"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981"} Dec 15 07:56:04 crc kubenswrapper[4876]: I1215 07:56:04.132504 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerStarted","Data":"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f"} Dec 15 07:56:04 crc kubenswrapper[4876]: I1215 07:56:04.156796 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6mdh8" podStartSLOduration=2.6683980419999997 podStartE2EDuration="5.15677069s" podCreationTimestamp="2025-12-15 07:55:59 +0000 UTC" firstStartedPulling="2025-12-15 07:56:01.103624556 +0000 UTC m=+3886.674767487" lastFinishedPulling="2025-12-15 07:56:03.591997234 +0000 UTC m=+3889.163140135" observedRunningTime="2025-12-15 07:56:04.149088715 +0000 UTC m=+3889.720231626" watchObservedRunningTime="2025-12-15 07:56:04.15677069 +0000 UTC m=+3889.727913621" Dec 15 07:56:09 crc kubenswrapper[4876]: I1215 07:56:09.950365 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:09 crc kubenswrapper[4876]: I1215 07:56:09.951266 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:10 crc kubenswrapper[4876]: I1215 07:56:10.024350 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:10 crc kubenswrapper[4876]: I1215 07:56:10.222740 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:11 crc kubenswrapper[4876]: I1215 07:56:11.378506 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.186625 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6mdh8" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="registry-server" containerID="cri-o://4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f" gracePeriod=2 Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.554749 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.692386 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities\") pod \"e9852011-b3cc-4941-9e26-c21452e86bd6\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.692547 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5s46\" (UniqueName: \"kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46\") pod \"e9852011-b3cc-4941-9e26-c21452e86bd6\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.692633 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content\") pod \"e9852011-b3cc-4941-9e26-c21452e86bd6\" (UID: \"e9852011-b3cc-4941-9e26-c21452e86bd6\") " Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.696128 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities" (OuterVolumeSpecName: "utilities") pod "e9852011-b3cc-4941-9e26-c21452e86bd6" (UID: "e9852011-b3cc-4941-9e26-c21452e86bd6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.704293 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46" (OuterVolumeSpecName: "kube-api-access-g5s46") pod "e9852011-b3cc-4941-9e26-c21452e86bd6" (UID: "e9852011-b3cc-4941-9e26-c21452e86bd6"). InnerVolumeSpecName "kube-api-access-g5s46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.726023 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9852011-b3cc-4941-9e26-c21452e86bd6" (UID: "e9852011-b3cc-4941-9e26-c21452e86bd6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.794359 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.794437 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5s46\" (UniqueName: \"kubernetes.io/projected/e9852011-b3cc-4941-9e26-c21452e86bd6-kube-api-access-g5s46\") on node \"crc\" DevicePath \"\"" Dec 15 07:56:12 crc kubenswrapper[4876]: I1215 07:56:12.794451 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9852011-b3cc-4941-9e26-c21452e86bd6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.196427 4876 generic.go:334] "Generic (PLEG): container finished" podID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerID="4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f" exitCode=0 Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.196495 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerDied","Data":"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f"} Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.196722 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6mdh8" event={"ID":"e9852011-b3cc-4941-9e26-c21452e86bd6","Type":"ContainerDied","Data":"513cc44dd765f77b41e2b7db9b0260a406e8bc5276af53df6fccc454d9e02022"} Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.196544 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6mdh8" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.196743 4876 scope.go:117] "RemoveContainer" containerID="4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.218681 4876 scope.go:117] "RemoveContainer" containerID="6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.252645 4876 scope.go:117] "RemoveContainer" containerID="aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.254252 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.260446 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6mdh8"] Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.283152 4876 scope.go:117] "RemoveContainer" containerID="4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f" Dec 15 07:56:13 crc kubenswrapper[4876]: E1215 07:56:13.283696 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f\": container with ID starting with 4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f not found: ID does not exist" containerID="4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.283737 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f"} err="failed to get container status \"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f\": rpc error: code = NotFound desc = could not find container \"4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f\": container with ID starting with 4ef42a4af4db872034ea19e43f35b88219ad5473250bd0b2b9d7731a8e15849f not found: ID does not exist" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.283771 4876 scope.go:117] "RemoveContainer" containerID="6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981" Dec 15 07:56:13 crc kubenswrapper[4876]: E1215 07:56:13.284230 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981\": container with ID starting with 6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981 not found: ID does not exist" containerID="6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.284268 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981"} err="failed to get container status \"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981\": rpc error: code = NotFound desc = could not find container \"6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981\": container with ID starting with 6572a1afd5e1ca8660e164c6ab263aa35b585c7e074b07307395e789d4f38981 not found: ID does not exist" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.284330 4876 scope.go:117] "RemoveContainer" 
containerID="aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe" Dec 15 07:56:13 crc kubenswrapper[4876]: E1215 07:56:13.284702 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe\": container with ID starting with aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe not found: ID does not exist" containerID="aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe" Dec 15 07:56:13 crc kubenswrapper[4876]: I1215 07:56:13.284743 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe"} err="failed to get container status \"aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe\": rpc error: code = NotFound desc = could not find container \"aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe\": container with ID starting with aedd4a503ecc2fc1be118f204c007689f3d7f26d517e71ed76910e920c392cfe not found: ID does not exist" Dec 15 07:56:14 crc kubenswrapper[4876]: I1215 07:56:14.716650 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" path="/var/lib/kubelet/pods/e9852011-b3cc-4941-9e26-c21452e86bd6/volumes" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.763254 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:16 crc kubenswrapper[4876]: E1215 07:57:16.764510 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="extract-utilities" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.764527 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="extract-utilities" Dec 15 07:57:16 crc kubenswrapper[4876]: E1215 07:57:16.764548 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="registry-server" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.764556 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="registry-server" Dec 15 07:57:16 crc kubenswrapper[4876]: E1215 07:57:16.764589 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="extract-content" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.764599 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="extract-content" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.764752 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9852011-b3cc-4941-9e26-c21452e86bd6" containerName="registry-server" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.765868 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.784943 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.915335 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.915403 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:16 crc kubenswrapper[4876]: I1215 07:57:16.915506 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vcfn\" (UniqueName: \"kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.016595 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.016637 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.016708 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vcfn\" (UniqueName: \"kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.017754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.017844 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.042198 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6vcfn\" (UniqueName: \"kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn\") pod \"redhat-operators-d8gxj\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.110075 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.559889 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.728347 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerStarted","Data":"ce8d7b43715b8e965287e24ca7671dea419d6025ff1cea920974cd7c28bab90e"} Dec 15 07:57:17 crc kubenswrapper[4876]: I1215 07:57:17.728404 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerStarted","Data":"15e720cd5553d8a475ea1e72a775b12bbeb60e2816d2bb47d9d9cc573b3323b4"} Dec 15 07:57:18 crc kubenswrapper[4876]: I1215 07:57:18.734592 4876 generic.go:334] "Generic (PLEG): container finished" podID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerID="ce8d7b43715b8e965287e24ca7671dea419d6025ff1cea920974cd7c28bab90e" exitCode=0 Dec 15 07:57:18 crc kubenswrapper[4876]: I1215 07:57:18.734642 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerDied","Data":"ce8d7b43715b8e965287e24ca7671dea419d6025ff1cea920974cd7c28bab90e"} Dec 15 07:57:19 crc kubenswrapper[4876]: I1215 07:57:19.741975 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerStarted","Data":"815a640625db34cd85bb38bf9aa1668f249a47b4a46f16438e373cda85a33b49"} Dec 15 07:57:20 crc kubenswrapper[4876]: I1215 07:57:20.751187 4876 generic.go:334] "Generic (PLEG): container finished" podID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerID="815a640625db34cd85bb38bf9aa1668f249a47b4a46f16438e373cda85a33b49" exitCode=0 Dec 15 07:57:20 crc kubenswrapper[4876]: I1215 07:57:20.751268 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerDied","Data":"815a640625db34cd85bb38bf9aa1668f249a47b4a46f16438e373cda85a33b49"} Dec 15 07:57:21 crc kubenswrapper[4876]: I1215 07:57:21.760452 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerStarted","Data":"336ab987cd49fc037f8537cf0a181650fed3352f76538c262ce6f6f695bdc5f9"} Dec 15 07:57:21 crc kubenswrapper[4876]: I1215 07:57:21.785276 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d8gxj" podStartSLOduration=3.3435623789999998 podStartE2EDuration="5.785253886s" podCreationTimestamp="2025-12-15 07:57:16 +0000 UTC" firstStartedPulling="2025-12-15 07:57:18.735956505 +0000 UTC m=+3964.307099416" lastFinishedPulling="2025-12-15 07:57:21.177647992 +0000 UTC m=+3966.748790923" 
observedRunningTime="2025-12-15 07:57:21.780433548 +0000 UTC m=+3967.351576469" watchObservedRunningTime="2025-12-15 07:57:21.785253886 +0000 UTC m=+3967.356396807" Dec 15 07:57:27 crc kubenswrapper[4876]: I1215 07:57:27.111497 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:27 crc kubenswrapper[4876]: I1215 07:57:27.112136 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:27 crc kubenswrapper[4876]: I1215 07:57:27.152830 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:27 crc kubenswrapper[4876]: I1215 07:57:27.885710 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:27 crc kubenswrapper[4876]: I1215 07:57:27.959375 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:29 crc kubenswrapper[4876]: I1215 07:57:29.823375 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d8gxj" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="registry-server" containerID="cri-o://336ab987cd49fc037f8537cf0a181650fed3352f76538c262ce6f6f695bdc5f9" gracePeriod=2 Dec 15 07:57:31 crc kubenswrapper[4876]: I1215 07:57:31.839169 4876 generic.go:334] "Generic (PLEG): container finished" podID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerID="336ab987cd49fc037f8537cf0a181650fed3352f76538c262ce6f6f695bdc5f9" exitCode=0 Dec 15 07:57:31 crc kubenswrapper[4876]: I1215 07:57:31.839280 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerDied","Data":"336ab987cd49fc037f8537cf0a181650fed3352f76538c262ce6f6f695bdc5f9"} Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.341372 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.458054 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content\") pod \"63986f65-9587-494b-9ff0-ae6d1817dbd2\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.458154 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities\") pod \"63986f65-9587-494b-9ff0-ae6d1817dbd2\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.458181 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vcfn\" (UniqueName: \"kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn\") pod \"63986f65-9587-494b-9ff0-ae6d1817dbd2\" (UID: \"63986f65-9587-494b-9ff0-ae6d1817dbd2\") " Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.458920 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities" (OuterVolumeSpecName: "utilities") pod "63986f65-9587-494b-9ff0-ae6d1817dbd2" (UID: "63986f65-9587-494b-9ff0-ae6d1817dbd2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.465429 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn" (OuterVolumeSpecName: "kube-api-access-6vcfn") pod "63986f65-9587-494b-9ff0-ae6d1817dbd2" (UID: "63986f65-9587-494b-9ff0-ae6d1817dbd2"). InnerVolumeSpecName "kube-api-access-6vcfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.563997 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.564024 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vcfn\" (UniqueName: \"kubernetes.io/projected/63986f65-9587-494b-9ff0-ae6d1817dbd2-kube-api-access-6vcfn\") on node \"crc\" DevicePath \"\"" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.583455 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "63986f65-9587-494b-9ff0-ae6d1817dbd2" (UID: "63986f65-9587-494b-9ff0-ae6d1817dbd2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.666072 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63986f65-9587-494b-9ff0-ae6d1817dbd2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.849092 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d8gxj" event={"ID":"63986f65-9587-494b-9ff0-ae6d1817dbd2","Type":"ContainerDied","Data":"15e720cd5553d8a475ea1e72a775b12bbeb60e2816d2bb47d9d9cc573b3323b4"} Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.849160 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d8gxj" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.849163 4876 scope.go:117] "RemoveContainer" containerID="336ab987cd49fc037f8537cf0a181650fed3352f76538c262ce6f6f695bdc5f9" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.872136 4876 scope.go:117] "RemoveContainer" containerID="815a640625db34cd85bb38bf9aa1668f249a47b4a46f16438e373cda85a33b49" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.881172 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.896380 4876 scope.go:117] "RemoveContainer" containerID="ce8d7b43715b8e965287e24ca7671dea419d6025ff1cea920974cd7c28bab90e" Dec 15 07:57:32 crc kubenswrapper[4876]: I1215 07:57:32.898593 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d8gxj"] Dec 15 07:57:34 crc kubenswrapper[4876]: I1215 07:57:34.719442 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" path="/var/lib/kubelet/pods/63986f65-9587-494b-9ff0-ae6d1817dbd2/volumes" Dec 15 07:58:27 crc kubenswrapper[4876]: I1215 07:58:27.322468 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:58:27 crc kubenswrapper[4876]: I1215 07:58:27.323025 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:58:57 crc kubenswrapper[4876]: I1215 07:58:57.322262 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:58:57 crc kubenswrapper[4876]: I1215 07:58:57.322894 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.322589 4876 patch_prober.go:28] 
interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.323165 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.323241 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.323854 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.324001 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1" gracePeriod=600 Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.805059 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1" exitCode=0 Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.805138 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1"} Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.805696 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c"} Dec 15 07:59:27 crc kubenswrapper[4876]: I1215 07:59:27.805810 4876 scope.go:117] "RemoveContainer" containerID="91f5cdf97dc944610008a258c6fb968a2d22fb7a23c4b97c398ec2d910e59021" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.179069 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg"] Dec 15 08:00:00 crc kubenswrapper[4876]: E1215 08:00:00.179938 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="extract-content" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.179952 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="extract-content" Dec 15 08:00:00 crc kubenswrapper[4876]: E1215 08:00:00.179968 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="registry-server" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.179975 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="registry-server" Dec 15 08:00:00 crc kubenswrapper[4876]: E1215 08:00:00.179992 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="extract-utilities" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.179998 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="extract-utilities" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.180136 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="63986f65-9587-494b-9ff0-ae6d1817dbd2" containerName="registry-server" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.180594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.182268 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.183464 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.193092 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg"] Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.298096 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg9f7\" (UniqueName: \"kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.298172 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.298211 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.399442 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.399511 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.399596 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg9f7\" (UniqueName: \"kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.400393 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.754168 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg9f7\" (UniqueName: \"kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.754190 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume\") pod \"collect-profiles-29429760-22glg\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:00 crc kubenswrapper[4876]: I1215 08:00:00.805907 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:01 crc kubenswrapper[4876]: I1215 08:00:01.242364 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg"] Dec 15 08:00:02 crc kubenswrapper[4876]: I1215 08:00:02.056267 4876 generic.go:334] "Generic (PLEG): container finished" podID="ff39bd75-4416-44bc-b207-29c57dbb3975" containerID="8c5d97fe05cc1d994041421a8ea06f64102d045b63c6bddb963e21afb7c16790" exitCode=0 Dec 15 08:00:02 crc kubenswrapper[4876]: I1215 08:00:02.056428 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" event={"ID":"ff39bd75-4416-44bc-b207-29c57dbb3975","Type":"ContainerDied","Data":"8c5d97fe05cc1d994041421a8ea06f64102d045b63c6bddb963e21afb7c16790"} Dec 15 08:00:02 crc kubenswrapper[4876]: I1215 08:00:02.056846 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" event={"ID":"ff39bd75-4416-44bc-b207-29c57dbb3975","Type":"ContainerStarted","Data":"10ff8f99a2405d312a1921b9064db00d8b8a29c7e0f101b8b3a6ee1be2bc1e9e"} Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.320713 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.435063 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume\") pod \"ff39bd75-4416-44bc-b207-29c57dbb3975\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.435129 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume\") pod \"ff39bd75-4416-44bc-b207-29c57dbb3975\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.435236 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg9f7\" (UniqueName: \"kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7\") pod \"ff39bd75-4416-44bc-b207-29c57dbb3975\" (UID: \"ff39bd75-4416-44bc-b207-29c57dbb3975\") " Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.436006 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume" (OuterVolumeSpecName: "config-volume") pod "ff39bd75-4416-44bc-b207-29c57dbb3975" (UID: "ff39bd75-4416-44bc-b207-29c57dbb3975"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.439937 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ff39bd75-4416-44bc-b207-29c57dbb3975" (UID: "ff39bd75-4416-44bc-b207-29c57dbb3975"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.440119 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7" (OuterVolumeSpecName: "kube-api-access-cg9f7") pod "ff39bd75-4416-44bc-b207-29c57dbb3975" (UID: "ff39bd75-4416-44bc-b207-29c57dbb3975"). InnerVolumeSpecName "kube-api-access-cg9f7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.536672 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg9f7\" (UniqueName: \"kubernetes.io/projected/ff39bd75-4416-44bc-b207-29c57dbb3975-kube-api-access-cg9f7\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.536711 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff39bd75-4416-44bc-b207-29c57dbb3975-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:03 crc kubenswrapper[4876]: I1215 08:00:03.536725 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff39bd75-4416-44bc-b207-29c57dbb3975-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.069262 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" event={"ID":"ff39bd75-4416-44bc-b207-29c57dbb3975","Type":"ContainerDied","Data":"10ff8f99a2405d312a1921b9064db00d8b8a29c7e0f101b8b3a6ee1be2bc1e9e"} Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.069531 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10ff8f99a2405d312a1921b9064db00d8b8a29c7e0f101b8b3a6ee1be2bc1e9e" Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.069582 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg" Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.387353 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb"] Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.392118 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429715-qn2tb"] Dec 15 08:00:04 crc kubenswrapper[4876]: I1215 08:00:04.722073 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a" path="/var/lib/kubelet/pods/d59f4ef2-8822-4ce4-8b8f-5f2b3b75463a/volumes" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.612911 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:07 crc kubenswrapper[4876]: E1215 08:00:07.613537 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff39bd75-4416-44bc-b207-29c57dbb3975" containerName="collect-profiles" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.613552 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff39bd75-4416-44bc-b207-29c57dbb3975" containerName="collect-profiles" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.613733 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff39bd75-4416-44bc-b207-29c57dbb3975" containerName="collect-profiles" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.614925 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.622527 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.694436 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwdfx\" (UniqueName: \"kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.694487 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.694532 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.796242 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.796359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwdfx\" (UniqueName: \"kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.796387 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.797295 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.797339 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.821480 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hwdfx\" (UniqueName: \"kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx\") pod \"certified-operators-88ckg\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:07 crc kubenswrapper[4876]: I1215 08:00:07.937757 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:08 crc kubenswrapper[4876]: I1215 08:00:08.462945 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:09 crc kubenswrapper[4876]: I1215 08:00:09.113392 4876 generic.go:334] "Generic (PLEG): container finished" podID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerID="9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02" exitCode=0 Dec 15 08:00:09 crc kubenswrapper[4876]: I1215 08:00:09.113434 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerDied","Data":"9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02"} Dec 15 08:00:09 crc kubenswrapper[4876]: I1215 08:00:09.113460 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerStarted","Data":"34cd4344db6c249754494b89c2f599840e13efe7c7cb85bd97299fe63c6f7e9f"} Dec 15 08:00:12 crc kubenswrapper[4876]: I1215 08:00:12.133469 4876 generic.go:334] "Generic (PLEG): container finished" podID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerID="2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d" exitCode=0 Dec 15 08:00:12 crc kubenswrapper[4876]: I1215 08:00:12.133568 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerDied","Data":"2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d"} Dec 15 08:00:13 crc kubenswrapper[4876]: I1215 08:00:13.145220 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerStarted","Data":"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a"} Dec 15 08:00:13 crc kubenswrapper[4876]: I1215 08:00:13.170973 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-88ckg" podStartSLOduration=2.418714636 podStartE2EDuration="6.170952498s" podCreationTimestamp="2025-12-15 08:00:07 +0000 UTC" firstStartedPulling="2025-12-15 08:00:09.115070638 +0000 UTC m=+4134.686213549" lastFinishedPulling="2025-12-15 08:00:12.8673085 +0000 UTC m=+4138.438451411" observedRunningTime="2025-12-15 08:00:13.165325249 +0000 UTC m=+4138.736468160" watchObservedRunningTime="2025-12-15 08:00:13.170952498 +0000 UTC m=+4138.742095419" Dec 15 08:00:17 crc kubenswrapper[4876]: I1215 08:00:17.937838 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:17 crc kubenswrapper[4876]: I1215 08:00:17.938439 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:17 crc kubenswrapper[4876]: I1215 08:00:17.976233 4876 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:18 crc kubenswrapper[4876]: I1215 08:00:18.227159 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:18 crc kubenswrapper[4876]: I1215 08:00:18.278401 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:20 crc kubenswrapper[4876]: I1215 08:00:20.192573 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-88ckg" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="registry-server" containerID="cri-o://d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a" gracePeriod=2 Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.067006 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.179555 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content\") pod \"11605779-f22a-47f6-b1c1-a102d29f8c5c\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.179743 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities\") pod \"11605779-f22a-47f6-b1c1-a102d29f8c5c\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.179911 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwdfx\" (UniqueName: \"kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx\") pod \"11605779-f22a-47f6-b1c1-a102d29f8c5c\" (UID: \"11605779-f22a-47f6-b1c1-a102d29f8c5c\") " Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.181341 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities" (OuterVolumeSpecName: "utilities") pod "11605779-f22a-47f6-b1c1-a102d29f8c5c" (UID: "11605779-f22a-47f6-b1c1-a102d29f8c5c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.185183 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx" (OuterVolumeSpecName: "kube-api-access-hwdfx") pod "11605779-f22a-47f6-b1c1-a102d29f8c5c" (UID: "11605779-f22a-47f6-b1c1-a102d29f8c5c"). InnerVolumeSpecName "kube-api-access-hwdfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.201619 4876 generic.go:334] "Generic (PLEG): container finished" podID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerID="d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a" exitCode=0 Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.201664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerDied","Data":"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a"} Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.201691 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-88ckg" event={"ID":"11605779-f22a-47f6-b1c1-a102d29f8c5c","Type":"ContainerDied","Data":"34cd4344db6c249754494b89c2f599840e13efe7c7cb85bd97299fe63c6f7e9f"} Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.201742 4876 scope.go:117] "RemoveContainer" containerID="d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.201868 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-88ckg" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.222312 4876 scope.go:117] "RemoveContainer" containerID="2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.238399 4876 scope.go:117] "RemoveContainer" containerID="9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.246759 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11605779-f22a-47f6-b1c1-a102d29f8c5c" (UID: "11605779-f22a-47f6-b1c1-a102d29f8c5c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.259909 4876 scope.go:117] "RemoveContainer" containerID="d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a" Dec 15 08:00:21 crc kubenswrapper[4876]: E1215 08:00:21.260349 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a\": container with ID starting with d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a not found: ID does not exist" containerID="d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.260384 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a"} err="failed to get container status \"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a\": rpc error: code = NotFound desc = could not find container \"d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a\": container with ID starting with d29d7950f0fd4d8dca677a44f9d816a082b048fb092d0b09280bd5ec980a830a not found: ID does not exist" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.260408 4876 scope.go:117] "RemoveContainer" containerID="2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d" Dec 15 08:00:21 crc kubenswrapper[4876]: E1215 08:00:21.260655 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d\": container with ID starting with 2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d not found: ID does not exist" containerID="2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.260690 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d"} err="failed to get container status \"2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d\": rpc error: code = NotFound desc = could not find container \"2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d\": container with ID starting with 2411abf720db52e0e547f7d6275c2169646cb3228bd701de1cd622f30b69758d not found: ID does not exist" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.260713 4876 scope.go:117] "RemoveContainer" containerID="9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02" Dec 15 08:00:21 crc kubenswrapper[4876]: E1215 08:00:21.260949 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02\": container with ID starting with 9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02 not found: ID does not exist" containerID="9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.260974 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02"} err="failed to get container status \"9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02\": rpc error: code = NotFound desc = could not 
find container \"9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02\": container with ID starting with 9f2177b47e4362187e69aad6edc2d3f66fa859f8ab5eca7b65600ba94146ac02 not found: ID does not exist" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.282038 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.282075 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11605779-f22a-47f6-b1c1-a102d29f8c5c-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.282088 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwdfx\" (UniqueName: \"kubernetes.io/projected/11605779-f22a-47f6-b1c1-a102d29f8c5c-kube-api-access-hwdfx\") on node \"crc\" DevicePath \"\"" Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.534265 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.539503 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-88ckg"] Dec 15 08:00:21 crc kubenswrapper[4876]: I1215 08:00:21.816484 4876 scope.go:117] "RemoveContainer" containerID="78e4aa3572d08fd3a46a9cee1a0093bf6fe2a0c703edf0a99556aaca1cc65182" Dec 15 08:00:22 crc kubenswrapper[4876]: I1215 08:00:22.712753 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" path="/var/lib/kubelet/pods/11605779-f22a-47f6-b1c1-a102d29f8c5c/volumes" Dec 15 08:01:27 crc kubenswrapper[4876]: I1215 08:01:27.322175 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:01:27 crc kubenswrapper[4876]: I1215 08:01:27.322565 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:01:57 crc kubenswrapper[4876]: I1215 08:01:57.322700 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:01:57 crc kubenswrapper[4876]: I1215 08:01:57.323150 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:02:27 crc kubenswrapper[4876]: I1215 08:02:27.322878 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:02:27 crc kubenswrapper[4876]: I1215 08:02:27.323485 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:02:27 crc kubenswrapper[4876]: I1215 08:02:27.323531 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:02:27 crc kubenswrapper[4876]: I1215 08:02:27.324151 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:02:27 crc kubenswrapper[4876]: I1215 08:02:27.324233 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" gracePeriod=600 Dec 15 08:02:27 crc kubenswrapper[4876]: E1215 08:02:27.461809 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:02:28 crc kubenswrapper[4876]: I1215 08:02:28.136415 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" exitCode=0 Dec 15 08:02:28 crc kubenswrapper[4876]: I1215 08:02:28.136462 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c"} Dec 15 08:02:28 crc kubenswrapper[4876]: I1215 08:02:28.136499 4876 scope.go:117] "RemoveContainer" containerID="025664be1aef554873bd1a5378aebf1083a31b6f2465234001b378edd8a789f1" Dec 15 08:02:28 crc kubenswrapper[4876]: I1215 08:02:28.136977 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:02:28 crc kubenswrapper[4876]: E1215 08:02:28.137250 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:02:43 crc kubenswrapper[4876]: I1215 08:02:43.705901 4876 scope.go:117] 
"RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:02:43 crc kubenswrapper[4876]: E1215 08:02:43.708200 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:02:54 crc kubenswrapper[4876]: I1215 08:02:54.710977 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:02:54 crc kubenswrapper[4876]: E1215 08:02:54.711973 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:03:08 crc kubenswrapper[4876]: I1215 08:03:08.705448 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:03:08 crc kubenswrapper[4876]: E1215 08:03:08.706354 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:03:21 crc kubenswrapper[4876]: I1215 08:03:21.327748 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:03:21 crc kubenswrapper[4876]: E1215 08:03:21.328419 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:03:31 crc kubenswrapper[4876]: I1215 08:03:31.705744 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:03:31 crc kubenswrapper[4876]: E1215 08:03:31.706967 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:03:43 crc kubenswrapper[4876]: I1215 08:03:43.705345 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:03:43 crc kubenswrapper[4876]: E1215 08:03:43.706362 4876 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:03:58 crc kubenswrapper[4876]: I1215 08:03:58.706322 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:03:58 crc kubenswrapper[4876]: E1215 08:03:58.707168 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:04:13 crc kubenswrapper[4876]: I1215 08:04:13.705912 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:04:13 crc kubenswrapper[4876]: E1215 08:04:13.708305 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:04:24 crc kubenswrapper[4876]: I1215 08:04:24.709814 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:04:24 crc kubenswrapper[4876]: E1215 08:04:24.710866 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:04:35 crc kubenswrapper[4876]: I1215 08:04:35.706260 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:04:35 crc kubenswrapper[4876]: E1215 08:04:35.707148 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:04:46 crc kubenswrapper[4876]: I1215 08:04:46.706253 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:04:46 crc kubenswrapper[4876]: E1215 08:04:46.707358 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:05:00 crc kubenswrapper[4876]: I1215 08:05:00.705496 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:05:00 crc kubenswrapper[4876]: E1215 08:05:00.706252 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:05:14 crc kubenswrapper[4876]: I1215 08:05:14.708868 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:05:14 crc kubenswrapper[4876]: E1215 08:05:14.709458 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.282974 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:20 crc kubenswrapper[4876]: E1215 08:05:20.289365 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="registry-server" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.289392 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="registry-server" Dec 15 08:05:20 crc kubenswrapper[4876]: E1215 08:05:20.289411 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="extract-utilities" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.289422 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="extract-utilities" Dec 15 08:05:20 crc kubenswrapper[4876]: E1215 08:05:20.289441 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="extract-content" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.289451 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="extract-content" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.289680 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="11605779-f22a-47f6-b1c1-a102d29f8c5c" containerName="registry-server" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.291203 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.291341 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.347179 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch98f\" (UniqueName: \"kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.347241 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.347541 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.448549 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.448611 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch98f\" (UniqueName: \"kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.448630 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.449090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.449126 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content\") pod \"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.470244 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch98f\" (UniqueName: \"kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f\") pod 
\"community-operators-mrhxf\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:20 crc kubenswrapper[4876]: I1215 08:05:20.616193 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:21 crc kubenswrapper[4876]: I1215 08:05:21.090447 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:21 crc kubenswrapper[4876]: I1215 08:05:21.464625 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerStarted","Data":"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858"} Dec 15 08:05:21 crc kubenswrapper[4876]: I1215 08:05:21.464842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerStarted","Data":"2bbfd6fcfc2be174139c552e87b56c9c71cdddc796af1828d0c0188d66378b2c"} Dec 15 08:05:22 crc kubenswrapper[4876]: I1215 08:05:22.472259 4876 generic.go:334] "Generic (PLEG): container finished" podID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerID="c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858" exitCode=0 Dec 15 08:05:22 crc kubenswrapper[4876]: I1215 08:05:22.472323 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerDied","Data":"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858"} Dec 15 08:05:22 crc kubenswrapper[4876]: I1215 08:05:22.474052 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:05:24 crc kubenswrapper[4876]: I1215 08:05:24.488207 4876 generic.go:334] "Generic (PLEG): container finished" podID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerID="d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466" exitCode=0 Dec 15 08:05:24 crc kubenswrapper[4876]: I1215 08:05:24.488272 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerDied","Data":"d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466"} Dec 15 08:05:25 crc kubenswrapper[4876]: I1215 08:05:25.497255 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerStarted","Data":"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f"} Dec 15 08:05:25 crc kubenswrapper[4876]: I1215 08:05:25.518467 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mrhxf" podStartSLOduration=2.9818009610000002 podStartE2EDuration="5.518436725s" podCreationTimestamp="2025-12-15 08:05:20 +0000 UTC" firstStartedPulling="2025-12-15 08:05:22.473835025 +0000 UTC m=+4448.044977936" lastFinishedPulling="2025-12-15 08:05:25.010470749 +0000 UTC m=+4450.581613700" observedRunningTime="2025-12-15 08:05:25.513618405 +0000 UTC m=+4451.084761336" watchObservedRunningTime="2025-12-15 08:05:25.518436725 +0000 UTC m=+4451.089579636" Dec 15 08:05:26 crc kubenswrapper[4876]: I1215 08:05:26.705583 4876 scope.go:117] "RemoveContainer" 
containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:05:26 crc kubenswrapper[4876]: E1215 08:05:26.705990 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:05:30 crc kubenswrapper[4876]: I1215 08:05:30.616651 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:30 crc kubenswrapper[4876]: I1215 08:05:30.617463 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:30 crc kubenswrapper[4876]: I1215 08:05:30.654929 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:31 crc kubenswrapper[4876]: I1215 08:05:31.581222 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:31 crc kubenswrapper[4876]: I1215 08:05:31.629941 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:33 crc kubenswrapper[4876]: I1215 08:05:33.549505 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mrhxf" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="registry-server" containerID="cri-o://549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f" gracePeriod=2 Dec 15 08:05:33 crc kubenswrapper[4876]: I1215 08:05:33.976096 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.091458 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content\") pod \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.091920 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch98f\" (UniqueName: \"kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f\") pod \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.091957 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities\") pod \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\" (UID: \"53d37d0e-8f57-476c-ae20-91b83f13bf3f\") " Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.092914 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities" (OuterVolumeSpecName: "utilities") pod "53d37d0e-8f57-476c-ae20-91b83f13bf3f" (UID: "53d37d0e-8f57-476c-ae20-91b83f13bf3f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.097491 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f" (OuterVolumeSpecName: "kube-api-access-ch98f") pod "53d37d0e-8f57-476c-ae20-91b83f13bf3f" (UID: "53d37d0e-8f57-476c-ae20-91b83f13bf3f"). InnerVolumeSpecName "kube-api-access-ch98f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.146051 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53d37d0e-8f57-476c-ae20-91b83f13bf3f" (UID: "53d37d0e-8f57-476c-ae20-91b83f13bf3f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.193333 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch98f\" (UniqueName: \"kubernetes.io/projected/53d37d0e-8f57-476c-ae20-91b83f13bf3f-kube-api-access-ch98f\") on node \"crc\" DevicePath \"\"" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.193367 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.193377 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53d37d0e-8f57-476c-ae20-91b83f13bf3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.562089 4876 generic.go:334] "Generic (PLEG): container finished" podID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerID="549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f" exitCode=0 Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.562165 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerDied","Data":"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f"} Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.562227 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mrhxf" event={"ID":"53d37d0e-8f57-476c-ae20-91b83f13bf3f","Type":"ContainerDied","Data":"2bbfd6fcfc2be174139c552e87b56c9c71cdddc796af1828d0c0188d66378b2c"} Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.562252 4876 scope.go:117] "RemoveContainer" containerID="549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.562183 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mrhxf" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.582535 4876 scope.go:117] "RemoveContainer" containerID="d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.603904 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.607546 4876 scope.go:117] "RemoveContainer" containerID="c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.610812 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mrhxf"] Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.628267 4876 scope.go:117] "RemoveContainer" containerID="549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f" Dec 15 08:05:34 crc kubenswrapper[4876]: E1215 08:05:34.628671 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f\": container with ID starting with 549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f not found: ID does not exist" containerID="549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.628724 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f"} err="failed to get container status \"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f\": rpc error: code = NotFound desc = could not find container \"549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f\": container with ID starting with 549bf20c4a28a6e569801366b4445f0af6a958eada5769a37d677df453dd394f not found: ID does not exist" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.628746 4876 scope.go:117] "RemoveContainer" containerID="d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466" Dec 15 08:05:34 crc kubenswrapper[4876]: E1215 08:05:34.628993 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466\": container with ID starting with d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466 not found: ID does not exist" containerID="d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.629013 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466"} err="failed to get container status \"d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466\": rpc error: code = NotFound desc = could not find container \"d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466\": container with ID starting with d668b170fe247df079643eba5ac0ce5b4922571655a6fee6b2e2aca8ffbfe466 not found: ID does not exist" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.629029 4876 scope.go:117] "RemoveContainer" containerID="c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858" Dec 15 08:05:34 crc kubenswrapper[4876]: E1215 08:05:34.629252 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858\": container with ID starting with c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858 not found: ID does not exist" containerID="c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.629270 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858"} err="failed to get container status \"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858\": rpc error: code = NotFound desc = could not find container \"c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858\": container with ID starting with c0477cc6e01d7773ef10a8964295c4b3a77706bd87a6d8c40511217d06cbb858 not found: ID does not exist" Dec 15 08:05:34 crc kubenswrapper[4876]: I1215 08:05:34.714441 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" path="/var/lib/kubelet/pods/53d37d0e-8f57-476c-ae20-91b83f13bf3f/volumes" Dec 15 08:05:38 crc kubenswrapper[4876]: I1215 08:05:38.706320 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:05:38 crc kubenswrapper[4876]: E1215 08:05:38.706810 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:05:51 crc kubenswrapper[4876]: I1215 08:05:51.705454 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:05:51 crc kubenswrapper[4876]: E1215 08:05:51.706402 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:06:03 crc kubenswrapper[4876]: I1215 08:06:03.705736 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:06:03 crc kubenswrapper[4876]: E1215 08:06:03.707193 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:06:17 crc kubenswrapper[4876]: I1215 08:06:17.706062 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:06:17 crc kubenswrapper[4876]: E1215 08:06:17.706956 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:06:29 crc kubenswrapper[4876]: I1215 08:06:29.707155 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:06:29 crc kubenswrapper[4876]: E1215 08:06:29.708573 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:06:43 crc kubenswrapper[4876]: I1215 08:06:43.705487 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:06:43 crc kubenswrapper[4876]: E1215 08:06:43.706220 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:06:58 crc kubenswrapper[4876]: I1215 08:06:58.706425 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:06:58 crc kubenswrapper[4876]: E1215 08:06:58.707369 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:07:09 crc kubenswrapper[4876]: I1215 08:07:09.705632 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:07:09 crc kubenswrapper[4876]: E1215 08:07:09.706444 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:07:24 crc kubenswrapper[4876]: I1215 08:07:24.710836 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:07:24 crc kubenswrapper[4876]: E1215 08:07:24.712540 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.579059 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:31 crc kubenswrapper[4876]: E1215 08:07:31.580018 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="extract-content" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.580038 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="extract-content" Dec 15 08:07:31 crc kubenswrapper[4876]: E1215 08:07:31.580054 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="registry-server" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.580062 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="registry-server" Dec 15 08:07:31 crc kubenswrapper[4876]: E1215 08:07:31.580076 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="extract-utilities" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.580085 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="extract-utilities" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.580268 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d37d0e-8f57-476c-ae20-91b83f13bf3f" containerName="registry-server" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.581484 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.593756 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.657702 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.657778 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f96jj\" (UniqueName: \"kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.657803 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.760199 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.760265 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f96jj\" (UniqueName: \"kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.760296 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.760696 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.760849 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.791192 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f96jj\" (UniqueName: \"kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj\") pod \"redhat-operators-9lmj4\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:31 crc kubenswrapper[4876]: I1215 08:07:31.903901 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:32 crc kubenswrapper[4876]: I1215 08:07:32.347610 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:32 crc kubenswrapper[4876]: I1215 08:07:32.430130 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerStarted","Data":"7d2518f9deba047ba3e952a2dcd009faa1f4127ab710ffb36cf7a723f70cd594"} Dec 15 08:07:33 crc kubenswrapper[4876]: I1215 08:07:33.440977 4876 generic.go:334] "Generic (PLEG): container finished" podID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerID="d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259" exitCode=0 Dec 15 08:07:33 crc kubenswrapper[4876]: I1215 08:07:33.441058 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerDied","Data":"d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259"} Dec 15 08:07:34 crc kubenswrapper[4876]: I1215 08:07:34.450218 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerStarted","Data":"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b"} Dec 15 08:07:35 crc kubenswrapper[4876]: I1215 08:07:35.459651 4876 generic.go:334] "Generic (PLEG): container finished" podID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerID="0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b" exitCode=0 Dec 15 08:07:35 crc kubenswrapper[4876]: I1215 08:07:35.459910 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerDied","Data":"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b"} Dec 15 08:07:35 crc kubenswrapper[4876]: I1215 08:07:35.705694 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:07:36 crc kubenswrapper[4876]: I1215 08:07:36.471155 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerStarted","Data":"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3"} Dec 15 08:07:36 crc kubenswrapper[4876]: I1215 08:07:36.474920 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b"} Dec 15 08:07:36 crc kubenswrapper[4876]: I1215 08:07:36.492265 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9lmj4" podStartSLOduration=3.02055767 podStartE2EDuration="5.492237332s" podCreationTimestamp="2025-12-15 08:07:31 +0000 UTC" 
firstStartedPulling="2025-12-15 08:07:33.443659444 +0000 UTC m=+4579.014802355" lastFinishedPulling="2025-12-15 08:07:35.915339106 +0000 UTC m=+4581.486482017" observedRunningTime="2025-12-15 08:07:36.488660826 +0000 UTC m=+4582.059803737" watchObservedRunningTime="2025-12-15 08:07:36.492237332 +0000 UTC m=+4582.063380243" Dec 15 08:07:41 crc kubenswrapper[4876]: I1215 08:07:41.904772 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:41 crc kubenswrapper[4876]: I1215 08:07:41.905357 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:41 crc kubenswrapper[4876]: I1215 08:07:41.949650 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:42 crc kubenswrapper[4876]: I1215 08:07:42.557825 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:42 crc kubenswrapper[4876]: I1215 08:07:42.613304 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.529893 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9lmj4" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="registry-server" containerID="cri-o://b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3" gracePeriod=2 Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.932047 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.949357 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content\") pod \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.949548 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities\") pod \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.949589 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f96jj\" (UniqueName: \"kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj\") pod \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\" (UID: \"6e3c05a4-5791-4dce-bfdd-789c8fa0a109\") " Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.952580 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities" (OuterVolumeSpecName: "utilities") pod "6e3c05a4-5791-4dce-bfdd-789c8fa0a109" (UID: "6e3c05a4-5791-4dce-bfdd-789c8fa0a109"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:07:44 crc kubenswrapper[4876]: I1215 08:07:44.961081 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj" (OuterVolumeSpecName: "kube-api-access-f96jj") pod "6e3c05a4-5791-4dce-bfdd-789c8fa0a109" (UID: "6e3c05a4-5791-4dce-bfdd-789c8fa0a109"). InnerVolumeSpecName "kube-api-access-f96jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.051055 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.051162 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f96jj\" (UniqueName: \"kubernetes.io/projected/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-kube-api-access-f96jj\") on node \"crc\" DevicePath \"\"" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.540029 4876 generic.go:334] "Generic (PLEG): container finished" podID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerID="b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3" exitCode=0 Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.540077 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerDied","Data":"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3"} Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.540091 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lmj4" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.540128 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lmj4" event={"ID":"6e3c05a4-5791-4dce-bfdd-789c8fa0a109","Type":"ContainerDied","Data":"7d2518f9deba047ba3e952a2dcd009faa1f4127ab710ffb36cf7a723f70cd594"} Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.540173 4876 scope.go:117] "RemoveContainer" containerID="b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.560476 4876 scope.go:117] "RemoveContainer" containerID="0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.672567 4876 scope.go:117] "RemoveContainer" containerID="d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.701222 4876 scope.go:117] "RemoveContainer" containerID="b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3" Dec 15 08:07:45 crc kubenswrapper[4876]: E1215 08:07:45.701898 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3\": container with ID starting with b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3 not found: ID does not exist" containerID="b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.701938 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3"} err="failed to get container status \"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3\": rpc error: code = NotFound desc = could not find container \"b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3\": container with ID starting with b3e5dbd3339c8a96136437572a0be972dce3ddd79d5daeb8588685ca0d8785a3 not found: ID does not exist" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.701972 4876 scope.go:117] "RemoveContainer" containerID="0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b" Dec 15 08:07:45 crc kubenswrapper[4876]: E1215 08:07:45.702355 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b\": container with ID starting with 0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b not found: ID does not exist" containerID="0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.702384 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b"} err="failed to get container status \"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b\": rpc error: code = NotFound desc = could not find container \"0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b\": container with ID starting with 0d910eb8e1731bed17e2e5552a76f035e30f6c3cd04748ec24cc35de3c17a04b not found: ID does not exist" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.702401 4876 scope.go:117] "RemoveContainer" containerID="d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259" Dec 15 08:07:45 crc kubenswrapper[4876]: E1215 08:07:45.702768 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259\": container with ID starting with d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259 not found: ID does not exist" containerID="d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259" Dec 15 08:07:45 crc kubenswrapper[4876]: I1215 08:07:45.702798 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259"} err="failed to get container status \"d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259\": rpc error: code = NotFound desc = could not find container \"d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259\": container with ID starting with d7b89f8ba061024d565f69ea621811fa077dd657e8fc5e24cdd8965feee59259 not found: ID does not exist" Dec 15 08:07:46 crc kubenswrapper[4876]: I1215 08:07:46.305590 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e3c05a4-5791-4dce-bfdd-789c8fa0a109" (UID: "6e3c05a4-5791-4dce-bfdd-789c8fa0a109"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:07:46 crc kubenswrapper[4876]: I1215 08:07:46.370532 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e3c05a4-5791-4dce-bfdd-789c8fa0a109-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:07:46 crc kubenswrapper[4876]: I1215 08:07:46.482681 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:46 crc kubenswrapper[4876]: I1215 08:07:46.490377 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9lmj4"] Dec 15 08:07:46 crc kubenswrapper[4876]: I1215 08:07:46.714276 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" path="/var/lib/kubelet/pods/6e3c05a4-5791-4dce-bfdd-789c8fa0a109/volumes" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.659519 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:21 crc kubenswrapper[4876]: E1215 08:09:21.660375 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="registry-server" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.660392 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="registry-server" Dec 15 08:09:21 crc kubenswrapper[4876]: E1215 08:09:21.660407 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="extract-content" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.660415 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="extract-content" Dec 15 08:09:21 crc kubenswrapper[4876]: E1215 08:09:21.660428 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="extract-utilities" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.660436 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="extract-utilities" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.660624 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e3c05a4-5791-4dce-bfdd-789c8fa0a109" containerName="registry-server" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.661858 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.678159 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.702558 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.702668 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.702725 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xw4h\" (UniqueName: \"kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.806052 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.806437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.806504 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xw4h\" (UniqueName: \"kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.806771 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.806868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.836619 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5xw4h\" (UniqueName: \"kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h\") pod \"redhat-marketplace-2st95\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:21 crc kubenswrapper[4876]: I1215 08:09:21.988394 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:22 crc kubenswrapper[4876]: I1215 08:09:22.425262 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:23 crc kubenswrapper[4876]: I1215 08:09:23.219692 4876 generic.go:334] "Generic (PLEG): container finished" podID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerID="cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4" exitCode=0 Dec 15 08:09:23 crc kubenswrapper[4876]: I1215 08:09:23.219736 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerDied","Data":"cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4"} Dec 15 08:09:23 crc kubenswrapper[4876]: I1215 08:09:23.219763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerStarted","Data":"90931951312ed9e2fa9b04a3a79908789e7559ed303ac2bef7d44d122faf7946"} Dec 15 08:09:24 crc kubenswrapper[4876]: I1215 08:09:24.227733 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerStarted","Data":"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73"} Dec 15 08:09:25 crc kubenswrapper[4876]: I1215 08:09:25.235944 4876 generic.go:334] "Generic (PLEG): container finished" podID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerID="2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73" exitCode=0 Dec 15 08:09:25 crc kubenswrapper[4876]: I1215 08:09:25.235995 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerDied","Data":"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73"} Dec 15 08:09:26 crc kubenswrapper[4876]: I1215 08:09:26.243834 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerStarted","Data":"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad"} Dec 15 08:09:31 crc kubenswrapper[4876]: I1215 08:09:31.989134 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:31 crc kubenswrapper[4876]: I1215 08:09:31.989444 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:32 crc kubenswrapper[4876]: I1215 08:09:32.028008 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:32 crc kubenswrapper[4876]: I1215 08:09:32.048210 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2st95" 
podStartSLOduration=8.584472379 podStartE2EDuration="11.048171575s" podCreationTimestamp="2025-12-15 08:09:21 +0000 UTC" firstStartedPulling="2025-12-15 08:09:23.222137068 +0000 UTC m=+4688.793279979" lastFinishedPulling="2025-12-15 08:09:25.685836254 +0000 UTC m=+4691.256979175" observedRunningTime="2025-12-15 08:09:26.265222057 +0000 UTC m=+4691.836364958" watchObservedRunningTime="2025-12-15 08:09:32.048171575 +0000 UTC m=+4697.619314486" Dec 15 08:09:32 crc kubenswrapper[4876]: I1215 08:09:32.339216 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:32 crc kubenswrapper[4876]: I1215 08:09:32.387325 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:34 crc kubenswrapper[4876]: I1215 08:09:34.304335 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2st95" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="registry-server" containerID="cri-o://e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad" gracePeriod=2 Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.182228 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.296373 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xw4h\" (UniqueName: \"kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h\") pod \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.296467 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities\") pod \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.296620 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content\") pod \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\" (UID: \"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d\") " Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.297404 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities" (OuterVolumeSpecName: "utilities") pod "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" (UID: "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.302302 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h" (OuterVolumeSpecName: "kube-api-access-5xw4h") pod "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" (UID: "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d"). InnerVolumeSpecName "kube-api-access-5xw4h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.312761 4876 generic.go:334] "Generic (PLEG): container finished" podID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerID="e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad" exitCode=0 Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.312817 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerDied","Data":"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad"} Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.312851 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2st95" event={"ID":"f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d","Type":"ContainerDied","Data":"90931951312ed9e2fa9b04a3a79908789e7559ed303ac2bef7d44d122faf7946"} Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.312875 4876 scope.go:117] "RemoveContainer" containerID="e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.313014 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2st95" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.318365 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" (UID: "f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.340224 4876 scope.go:117] "RemoveContainer" containerID="2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.356850 4876 scope.go:117] "RemoveContainer" containerID="cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.380987 4876 scope.go:117] "RemoveContainer" containerID="e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad" Dec 15 08:09:35 crc kubenswrapper[4876]: E1215 08:09:35.381440 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad\": container with ID starting with e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad not found: ID does not exist" containerID="e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.381470 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad"} err="failed to get container status \"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad\": rpc error: code = NotFound desc = could not find container \"e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad\": container with ID starting with e9c9fe3efd398d7bea7be9d6975ab718e226775f05ee7e8838e5cd84bd7577ad not found: ID does not exist" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.381491 4876 scope.go:117] "RemoveContainer" containerID="2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73" Dec 15 
08:09:35 crc kubenswrapper[4876]: E1215 08:09:35.381733 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73\": container with ID starting with 2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73 not found: ID does not exist" containerID="2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.381752 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73"} err="failed to get container status \"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73\": rpc error: code = NotFound desc = could not find container \"2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73\": container with ID starting with 2d66a57bac6776c6dbdfe750fa86e0fa1b973fb3e2e9064c137841eb94bbbf73 not found: ID does not exist" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.381766 4876 scope.go:117] "RemoveContainer" containerID="cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4" Dec 15 08:09:35 crc kubenswrapper[4876]: E1215 08:09:35.382044 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4\": container with ID starting with cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4 not found: ID does not exist" containerID="cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.382074 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4"} err="failed to get container status \"cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4\": rpc error: code = NotFound desc = could not find container \"cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4\": container with ID starting with cf7cd37e340d6e54b8aec188350643f8e4ef6f6d744f4f478e4fff0dd69f81d4 not found: ID does not exist" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.398297 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.398352 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xw4h\" (UniqueName: \"kubernetes.io/projected/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-kube-api-access-5xw4h\") on node \"crc\" DevicePath \"\"" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.398362 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.646257 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:35 crc kubenswrapper[4876]: I1215 08:09:35.650210 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2st95"] Dec 15 08:09:36 crc kubenswrapper[4876]: I1215 08:09:36.713762 4876 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" path="/var/lib/kubelet/pods/f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d/volumes" Dec 15 08:09:57 crc kubenswrapper[4876]: I1215 08:09:57.323010 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:09:57 crc kubenswrapper[4876]: I1215 08:09:57.323633 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:10:27 crc kubenswrapper[4876]: I1215 08:10:27.322524 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:10:27 crc kubenswrapper[4876]: I1215 08:10:27.323247 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.393222 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:10:48 crc kubenswrapper[4876]: E1215 08:10:48.394029 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="registry-server" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.394042 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="registry-server" Dec 15 08:10:48 crc kubenswrapper[4876]: E1215 08:10:48.394059 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="extract-content" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.394065 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="extract-content" Dec 15 08:10:48 crc kubenswrapper[4876]: E1215 08:10:48.394081 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="extract-utilities" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.394087 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="extract-utilities" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.394248 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f91c3693-7b71-4e0e-93ca-c0c33f5bdf9d" containerName="registry-server" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.395396 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.407330 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.552004 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp5wj\" (UniqueName: \"kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.552146 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.552184 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.653394 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.653449 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.653500 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp5wj\" (UniqueName: \"kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.654002 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.654002 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.675031 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vp5wj\" (UniqueName: \"kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj\") pod \"certified-operators-fffv5\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:48 crc kubenswrapper[4876]: I1215 08:10:48.712817 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:49 crc kubenswrapper[4876]: I1215 08:10:49.177274 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:10:49 crc kubenswrapper[4876]: I1215 08:10:49.819613 4876 generic.go:334] "Generic (PLEG): container finished" podID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerID="24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696" exitCode=0 Dec 15 08:10:49 crc kubenswrapper[4876]: I1215 08:10:49.819686 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerDied","Data":"24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696"} Dec 15 08:10:49 crc kubenswrapper[4876]: I1215 08:10:49.819968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerStarted","Data":"2e2e9507ce85db597d6ce323d0a53643696b22992ca00671057e232279383b03"} Dec 15 08:10:49 crc kubenswrapper[4876]: I1215 08:10:49.821380 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:10:51 crc kubenswrapper[4876]: I1215 08:10:51.834056 4876 generic.go:334] "Generic (PLEG): container finished" podID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerID="81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52" exitCode=0 Dec 15 08:10:51 crc kubenswrapper[4876]: I1215 08:10:51.834143 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerDied","Data":"81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52"} Dec 15 08:10:52 crc kubenswrapper[4876]: I1215 08:10:52.843763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerStarted","Data":"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473"} Dec 15 08:10:52 crc kubenswrapper[4876]: I1215 08:10:52.862172 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fffv5" podStartSLOduration=2.096550811 podStartE2EDuration="4.862157043s" podCreationTimestamp="2025-12-15 08:10:48 +0000 UTC" firstStartedPulling="2025-12-15 08:10:49.821146839 +0000 UTC m=+4775.392289750" lastFinishedPulling="2025-12-15 08:10:52.586753071 +0000 UTC m=+4778.157895982" observedRunningTime="2025-12-15 08:10:52.861569086 +0000 UTC m=+4778.432711997" watchObservedRunningTime="2025-12-15 08:10:52.862157043 +0000 UTC m=+4778.433299954" Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.322502 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.323755 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.323878 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.324609 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.324824 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b" gracePeriod=600 Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.876900 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b" exitCode=0 Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.876973 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b"} Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.877516 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb"} Dec 15 08:10:57 crc kubenswrapper[4876]: I1215 08:10:57.877542 4876 scope.go:117] "RemoveContainer" containerID="64363e6c10010662a449a08a6300b80a9be7a39afa2f8efbcacd0bbcdd28b22c" Dec 15 08:10:58 crc kubenswrapper[4876]: I1215 08:10:58.713941 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:58 crc kubenswrapper[4876]: I1215 08:10:58.714016 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:58 crc kubenswrapper[4876]: I1215 08:10:58.753896 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:58 crc kubenswrapper[4876]: I1215 08:10:58.931936 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:10:59 crc kubenswrapper[4876]: I1215 08:10:59.024628 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:11:00 crc kubenswrapper[4876]: I1215 08:11:00.896959 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fffv5" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="registry-server" containerID="cri-o://395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473" gracePeriod=2 Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.278987 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.447167 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp5wj\" (UniqueName: \"kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj\") pod \"3a9f8303-db6e-464a-a67e-90cdda78c138\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.447219 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities\") pod \"3a9f8303-db6e-464a-a67e-90cdda78c138\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.447385 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content\") pod \"3a9f8303-db6e-464a-a67e-90cdda78c138\" (UID: \"3a9f8303-db6e-464a-a67e-90cdda78c138\") " Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.449149 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities" (OuterVolumeSpecName: "utilities") pod "3a9f8303-db6e-464a-a67e-90cdda78c138" (UID: "3a9f8303-db6e-464a-a67e-90cdda78c138"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.455502 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj" (OuterVolumeSpecName: "kube-api-access-vp5wj") pod "3a9f8303-db6e-464a-a67e-90cdda78c138" (UID: "3a9f8303-db6e-464a-a67e-90cdda78c138"). InnerVolumeSpecName "kube-api-access-vp5wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.506512 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a9f8303-db6e-464a-a67e-90cdda78c138" (UID: "3a9f8303-db6e-464a-a67e-90cdda78c138"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.548683 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.548724 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9f8303-db6e-464a-a67e-90cdda78c138-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.548733 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp5wj\" (UniqueName: \"kubernetes.io/projected/3a9f8303-db6e-464a-a67e-90cdda78c138-kube-api-access-vp5wj\") on node \"crc\" DevicePath \"\"" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.906029 4876 generic.go:334] "Generic (PLEG): container finished" podID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerID="395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473" exitCode=0 Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.906066 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerDied","Data":"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473"} Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.906404 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fffv5" event={"ID":"3a9f8303-db6e-464a-a67e-90cdda78c138","Type":"ContainerDied","Data":"2e2e9507ce85db597d6ce323d0a53643696b22992ca00671057e232279383b03"} Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.906454 4876 scope.go:117] "RemoveContainer" containerID="395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.906149 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fffv5" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.928651 4876 scope.go:117] "RemoveContainer" containerID="81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.953853 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.959577 4876 scope.go:117] "RemoveContainer" containerID="24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.961646 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fffv5"] Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.977633 4876 scope.go:117] "RemoveContainer" containerID="395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473" Dec 15 08:11:01 crc kubenswrapper[4876]: E1215 08:11:01.978023 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473\": container with ID starting with 395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473 not found: ID does not exist" containerID="395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.978068 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473"} err="failed to get container status \"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473\": rpc error: code = NotFound desc = could not find container \"395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473\": container with ID starting with 395ca6edac57e6c2e9c6c6db2f058fcbb95a7904caaf348df79f66d4c3ce5473 not found: ID does not exist" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.978092 4876 scope.go:117] "RemoveContainer" containerID="81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52" Dec 15 08:11:01 crc kubenswrapper[4876]: E1215 08:11:01.978642 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52\": container with ID starting with 81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52 not found: ID does not exist" containerID="81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.978678 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52"} err="failed to get container status \"81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52\": rpc error: code = NotFound desc = could not find container \"81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52\": container with ID starting with 81862753c6e40b9b41a6f4846834013e9676ec50c07dd199f4f78331e2211e52 not found: ID does not exist" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.978706 4876 scope.go:117] "RemoveContainer" containerID="24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696" Dec 15 08:11:01 crc kubenswrapper[4876]: E1215 08:11:01.979127 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696\": container with ID starting with 24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696 not found: ID does not exist" containerID="24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696" Dec 15 08:11:01 crc kubenswrapper[4876]: I1215 08:11:01.979159 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696"} err="failed to get container status \"24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696\": rpc error: code = NotFound desc = could not find container \"24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696\": container with ID starting with 24f6a130bd139d9e5a5c158a6257a7958ae18abbc724a977f45b86ce5e9ff696 not found: ID does not exist" Dec 15 08:11:02 crc kubenswrapper[4876]: I1215 08:11:02.713796 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" path="/var/lib/kubelet/pods/3a9f8303-db6e-464a-a67e-90cdda78c138/volumes" Dec 15 08:12:57 crc kubenswrapper[4876]: I1215 08:12:57.323023 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:12:57 crc kubenswrapper[4876]: I1215 08:12:57.323624 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:13:27 crc kubenswrapper[4876]: I1215 08:13:27.322829 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:13:27 crc kubenswrapper[4876]: I1215 08:13:27.323513 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.324208 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.325698 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.325822 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.326617 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.326746 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" gracePeriod=600 Dec 15 08:13:57 crc kubenswrapper[4876]: E1215 08:13:57.445454 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.753720 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" exitCode=0 Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.753866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb"} Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.754093 4876 scope.go:117] "RemoveContainer" containerID="36f38b36521b00f9a4d0ec44d72cef737ce209760cb13918b814a3cb5214224b" Dec 15 08:13:57 crc kubenswrapper[4876]: I1215 08:13:57.758539 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:13:57 crc kubenswrapper[4876]: E1215 08:13:57.758969 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:14:10 crc kubenswrapper[4876]: I1215 08:14:10.705313 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:14:10 crc kubenswrapper[4876]: E1215 08:14:10.706072 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:14:21 crc 
kubenswrapper[4876]: I1215 08:14:21.705677 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:14:21 crc kubenswrapper[4876]: E1215 08:14:21.706251 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:14:35 crc kubenswrapper[4876]: I1215 08:14:35.705672 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:14:35 crc kubenswrapper[4876]: E1215 08:14:35.706455 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:14:46 crc kubenswrapper[4876]: I1215 08:14:46.705694 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:14:46 crc kubenswrapper[4876]: E1215 08:14:46.706435 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:14:58 crc kubenswrapper[4876]: I1215 08:14:58.705545 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:14:58 crc kubenswrapper[4876]: E1215 08:14:58.706089 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.148497 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg"] Dec 15 08:15:00 crc kubenswrapper[4876]: E1215 08:15:00.149449 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="extract-content" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.149488 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="extract-content" Dec 15 08:15:00 crc kubenswrapper[4876]: E1215 08:15:00.149501 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="extract-utilities" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.149509 4876 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="extract-utilities" Dec 15 08:15:00 crc kubenswrapper[4876]: E1215 08:15:00.149520 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="registry-server" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.149525 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="registry-server" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.149712 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9f8303-db6e-464a-a67e-90cdda78c138" containerName="registry-server" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.150411 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.152424 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.153202 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.165930 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg"] Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.250488 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.250546 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8d75\" (UniqueName: \"kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.250775 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.352378 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.352504 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume\") pod 
\"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.352540 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8d75\" (UniqueName: \"kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.353688 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.364906 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.369165 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8d75\" (UniqueName: \"kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75\") pod \"collect-profiles-29429775-5pnbg\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.473753 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:00 crc kubenswrapper[4876]: I1215 08:15:00.884347 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg"] Dec 15 08:15:01 crc kubenswrapper[4876]: I1215 08:15:01.220268 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" event={"ID":"e0341cdd-2d11-4530-8ba9-300fd9c900b6","Type":"ContainerStarted","Data":"f2c98e708fccc39766fbff57d8e95ae990cec521ed3554ee35fc6d4ac95612e4"} Dec 15 08:15:01 crc kubenswrapper[4876]: I1215 08:15:01.220577 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" event={"ID":"e0341cdd-2d11-4530-8ba9-300fd9c900b6","Type":"ContainerStarted","Data":"de1d56ec1a64f19f0a3ccefcd3ac0b285fcdd5eaa96c2d7f293313d43a4faef2"} Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.229006 4876 generic.go:334] "Generic (PLEG): container finished" podID="e0341cdd-2d11-4530-8ba9-300fd9c900b6" containerID="f2c98e708fccc39766fbff57d8e95ae990cec521ed3554ee35fc6d4ac95612e4" exitCode=0 Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.229052 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" event={"ID":"e0341cdd-2d11-4530-8ba9-300fd9c900b6","Type":"ContainerDied","Data":"f2c98e708fccc39766fbff57d8e95ae990cec521ed3554ee35fc6d4ac95612e4"} Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.552760 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.585935 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume\") pod \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.586299 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume\") pod \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.586396 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8d75\" (UniqueName: \"kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75\") pod \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\" (UID: \"e0341cdd-2d11-4530-8ba9-300fd9c900b6\") " Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.588506 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume" (OuterVolumeSpecName: "config-volume") pod "e0341cdd-2d11-4530-8ba9-300fd9c900b6" (UID: "e0341cdd-2d11-4530-8ba9-300fd9c900b6"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.600938 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e0341cdd-2d11-4530-8ba9-300fd9c900b6" (UID: "e0341cdd-2d11-4530-8ba9-300fd9c900b6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.607698 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75" (OuterVolumeSpecName: "kube-api-access-s8d75") pod "e0341cdd-2d11-4530-8ba9-300fd9c900b6" (UID: "e0341cdd-2d11-4530-8ba9-300fd9c900b6"). InnerVolumeSpecName "kube-api-access-s8d75". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.688200 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e0341cdd-2d11-4530-8ba9-300fd9c900b6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.688235 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e0341cdd-2d11-4530-8ba9-300fd9c900b6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:02 crc kubenswrapper[4876]: I1215 08:15:02.688256 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8d75\" (UniqueName: \"kubernetes.io/projected/e0341cdd-2d11-4530-8ba9-300fd9c900b6-kube-api-access-s8d75\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:03 crc kubenswrapper[4876]: I1215 08:15:03.236038 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" event={"ID":"e0341cdd-2d11-4530-8ba9-300fd9c900b6","Type":"ContainerDied","Data":"de1d56ec1a64f19f0a3ccefcd3ac0b285fcdd5eaa96c2d7f293313d43a4faef2"} Dec 15 08:15:03 crc kubenswrapper[4876]: I1215 08:15:03.236430 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de1d56ec1a64f19f0a3ccefcd3ac0b285fcdd5eaa96c2d7f293313d43a4faef2" Dec 15 08:15:03 crc kubenswrapper[4876]: I1215 08:15:03.236166 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg" Dec 15 08:15:03 crc kubenswrapper[4876]: I1215 08:15:03.629003 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl"] Dec 15 08:15:03 crc kubenswrapper[4876]: I1215 08:15:03.635241 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429730-74xbl"] Dec 15 08:15:04 crc kubenswrapper[4876]: I1215 08:15:04.714677 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82f1250f-9f77-46f6-9332-9639e443ad70" path="/var/lib/kubelet/pods/82f1250f-9f77-46f6-9332-9639e443ad70/volumes" Dec 15 08:15:12 crc kubenswrapper[4876]: I1215 08:15:12.705999 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:15:12 crc kubenswrapper[4876]: E1215 08:15:12.707170 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:15:22 crc kubenswrapper[4876]: I1215 08:15:22.123566 4876 scope.go:117] "RemoveContainer" containerID="8c25af4b74315f842b92ecc56be19e77ca2d7032d5e3123c31a1caeb74cfc033" Dec 15 08:15:24 crc kubenswrapper[4876]: I1215 08:15:24.709991 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:15:24 crc kubenswrapper[4876]: E1215 08:15:24.710580 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.896580 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:34 crc kubenswrapper[4876]: E1215 08:15:34.897748 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0341cdd-2d11-4530-8ba9-300fd9c900b6" containerName="collect-profiles" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.897762 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0341cdd-2d11-4530-8ba9-300fd9c900b6" containerName="collect-profiles" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.897927 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0341cdd-2d11-4530-8ba9-300fd9c900b6" containerName="collect-profiles" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.899154 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.910864 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-842mn\" (UniqueName: \"kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.910989 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.911018 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:34 crc kubenswrapper[4876]: I1215 08:15:34.927033 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.011444 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.011491 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.011543 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-842mn\" (UniqueName: \"kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.012510 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.012639 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.035223 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-842mn\" (UniqueName: \"kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn\") pod \"community-operators-frr4g\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.227382 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:35 crc kubenswrapper[4876]: I1215 08:15:35.713430 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:36 crc kubenswrapper[4876]: I1215 08:15:36.468353 4876 generic.go:334] "Generic (PLEG): container finished" podID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerID="01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca" exitCode=0 Dec 15 08:15:36 crc kubenswrapper[4876]: I1215 08:15:36.468449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerDied","Data":"01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca"} Dec 15 08:15:36 crc kubenswrapper[4876]: I1215 08:15:36.468678 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerStarted","Data":"62ffe8b81bb133ce1e0fe1b68dc5c49ff2e42375baad6c2db90cbb4197bfaec1"} Dec 15 08:15:37 crc kubenswrapper[4876]: I1215 08:15:37.478279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerStarted","Data":"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f"} Dec 15 08:15:37 crc kubenswrapper[4876]: I1215 08:15:37.706789 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:15:37 crc kubenswrapper[4876]: E1215 08:15:37.707141 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:15:38 crc kubenswrapper[4876]: I1215 08:15:38.487613 4876 generic.go:334] "Generic (PLEG): container finished" podID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerID="4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f" exitCode=0 Dec 15 08:15:38 crc kubenswrapper[4876]: I1215 08:15:38.487678 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerDied","Data":"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f"} Dec 15 08:15:39 crc kubenswrapper[4876]: I1215 08:15:39.499997 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerStarted","Data":"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8"} Dec 15 08:15:39 crc kubenswrapper[4876]: I1215 08:15:39.528924 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-frr4g" podStartSLOduration=3.067206372 podStartE2EDuration="5.528862046s" podCreationTimestamp="2025-12-15 08:15:34 +0000 UTC" firstStartedPulling="2025-12-15 08:15:36.470568966 +0000 UTC m=+5062.041711877" lastFinishedPulling="2025-12-15 08:15:38.93222465 +0000 UTC m=+5064.503367551" observedRunningTime="2025-12-15 08:15:39.522977507 +0000 UTC m=+5065.094120418" watchObservedRunningTime="2025-12-15 08:15:39.528862046 +0000 UTC m=+5065.100004967" Dec 15 08:15:45 crc kubenswrapper[4876]: I1215 08:15:45.227566 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:45 crc kubenswrapper[4876]: I1215 08:15:45.228416 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:45 crc kubenswrapper[4876]: I1215 08:15:45.273575 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:45 crc kubenswrapper[4876]: I1215 08:15:45.579010 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:45 crc kubenswrapper[4876]: I1215 08:15:45.626476 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:47 crc kubenswrapper[4876]: I1215 08:15:47.551229 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-frr4g" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="registry-server" containerID="cri-o://2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8" gracePeriod=2 Dec 15 08:15:47 crc kubenswrapper[4876]: I1215 08:15:47.973850 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.104316 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-842mn\" (UniqueName: \"kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn\") pod \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.104486 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content\") pod \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.104628 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities\") pod \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\" (UID: \"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7\") " Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.105680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities" (OuterVolumeSpecName: "utilities") pod "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" (UID: "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.112871 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn" (OuterVolumeSpecName: "kube-api-access-842mn") pod "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" (UID: "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7"). InnerVolumeSpecName "kube-api-access-842mn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.154791 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" (UID: "19c4cc3c-c16a-4dbb-827e-ffa6b28861c7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.206083 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.206144 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-842mn\" (UniqueName: \"kubernetes.io/projected/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-kube-api-access-842mn\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.206159 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.558995 4876 generic.go:334] "Generic (PLEG): container finished" podID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerID="2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8" exitCode=0 Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.559053 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-frr4g" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.559079 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerDied","Data":"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8"} Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.559324 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-frr4g" event={"ID":"19c4cc3c-c16a-4dbb-827e-ffa6b28861c7","Type":"ContainerDied","Data":"62ffe8b81bb133ce1e0fe1b68dc5c49ff2e42375baad6c2db90cbb4197bfaec1"} Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.559350 4876 scope.go:117] "RemoveContainer" containerID="2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.588265 4876 scope.go:117] "RemoveContainer" containerID="4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.590895 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.601541 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-frr4g"] Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.614167 4876 scope.go:117] "RemoveContainer" containerID="01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.635501 4876 scope.go:117] "RemoveContainer" containerID="2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8" Dec 15 08:15:48 crc kubenswrapper[4876]: E1215 08:15:48.636020 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8\": container with ID starting with 2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8 not found: ID does not exist" containerID="2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.636075 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8"} err="failed to get container status \"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8\": rpc error: code = NotFound desc = could not find container \"2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8\": container with ID starting with 2711661b35b16e35e13514f6f0523e1e9e0bf1403c8aae4f4572124d6384fca8 not found: ID does not exist" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.636127 4876 scope.go:117] "RemoveContainer" containerID="4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f" Dec 15 08:15:48 crc kubenswrapper[4876]: E1215 08:15:48.636617 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f\": container with ID starting with 4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f not found: ID does not exist" containerID="4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.636657 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f"} err="failed to get container status \"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f\": rpc error: code = NotFound desc = could not find container \"4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f\": container with ID starting with 4b4207000e80f796b59841a562ea1c4439a71fe1fb3fa23464a98d940ac0368f not found: ID does not exist" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.636681 4876 scope.go:117] "RemoveContainer" containerID="01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca" Dec 15 08:15:48 crc kubenswrapper[4876]: E1215 08:15:48.637310 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca\": container with ID starting with 01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca not found: ID does not exist" containerID="01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.637344 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca"} err="failed to get container status \"01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca\": rpc error: code = NotFound desc = could not find container \"01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca\": container with ID starting with 01d2cb7459bfae6d7642c268a6facbea577ab08a9aa46585f5b8ce8e87d380ca not found: ID does not exist" Dec 15 08:15:48 crc kubenswrapper[4876]: E1215 08:15:48.656593 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19c4cc3c_c16a_4dbb_827e_ffa6b28861c7.slice\": RecentStats: unable to find data in memory cache]" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.705596 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:15:48 crc kubenswrapper[4876]: E1215 08:15:48.705989 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:15:48 crc kubenswrapper[4876]: I1215 08:15:48.714042 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" path="/var/lib/kubelet/pods/19c4cc3c-c16a-4dbb-827e-ffa6b28861c7/volumes" Dec 15 08:16:03 crc kubenswrapper[4876]: I1215 08:16:03.705805 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:16:03 crc kubenswrapper[4876]: E1215 08:16:03.706582 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:16:18 crc kubenswrapper[4876]: I1215 08:16:18.705067 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:16:18 crc kubenswrapper[4876]: E1215 08:16:18.705924 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:16:33 crc kubenswrapper[4876]: I1215 08:16:33.706048 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:16:33 crc kubenswrapper[4876]: E1215 08:16:33.706982 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:16:44 crc kubenswrapper[4876]: I1215 08:16:44.711263 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:16:44 crc kubenswrapper[4876]: E1215 08:16:44.712384 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:16:58 crc kubenswrapper[4876]: I1215 08:16:58.705268 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:16:58 crc kubenswrapper[4876]: E1215 08:16:58.706050 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:17:09 crc kubenswrapper[4876]: I1215 08:17:09.705463 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:17:09 crc kubenswrapper[4876]: E1215 08:17:09.706064 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:17:22 crc kubenswrapper[4876]: I1215 08:17:22.706115 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:17:22 crc kubenswrapper[4876]: E1215 08:17:22.707381 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:17:35 crc kubenswrapper[4876]: I1215 08:17:35.706445 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:17:35 crc kubenswrapper[4876]: E1215 08:17:35.707362 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:17:49 crc kubenswrapper[4876]: I1215 08:17:49.705247 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:17:49 crc kubenswrapper[4876]: E1215 08:17:49.706188 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:18:02 crc kubenswrapper[4876]: I1215 08:18:02.706229 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:18:02 crc kubenswrapper[4876]: E1215 08:18:02.707066 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:18:15 crc kubenswrapper[4876]: I1215 08:18:15.705797 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:18:15 crc kubenswrapper[4876]: E1215 08:18:15.706646 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:18:29 crc kubenswrapper[4876]: I1215 08:18:29.706233 4876 scope.go:117] "RemoveContainer" 
containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:18:29 crc kubenswrapper[4876]: E1215 08:18:29.707406 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:18:44 crc kubenswrapper[4876]: I1215 08:18:44.713169 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:18:44 crc kubenswrapper[4876]: E1215 08:18:44.714062 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:18:56 crc kubenswrapper[4876]: I1215 08:18:56.705809 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:18:56 crc kubenswrapper[4876]: E1215 08:18:56.706810 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:19:08 crc kubenswrapper[4876]: I1215 08:19:08.705345 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:19:08 crc kubenswrapper[4876]: I1215 08:19:08.885135 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da"} Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.392477 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:24 crc kubenswrapper[4876]: E1215 08:20:24.393621 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="extract-content" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.393642 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="extract-content" Dec 15 08:20:24 crc kubenswrapper[4876]: E1215 08:20:24.393659 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="registry-server" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.393670 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="registry-server" Dec 15 08:20:24 crc kubenswrapper[4876]: E1215 08:20:24.393702 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="extract-utilities" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.393713 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="extract-utilities" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.393923 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="19c4cc3c-c16a-4dbb-827e-ffa6b28861c7" containerName="registry-server" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.395535 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.415279 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.523435 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw9jc\" (UniqueName: \"kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.523529 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.523598 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.625082 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.625164 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw9jc\" (UniqueName: \"kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.625218 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.625661 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities\") pod \"redhat-marketplace-7hvhn\" (UID: 
\"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.625734 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.647004 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw9jc\" (UniqueName: \"kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc\") pod \"redhat-marketplace-7hvhn\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:24 crc kubenswrapper[4876]: I1215 08:20:24.714734 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:25 crc kubenswrapper[4876]: I1215 08:20:25.148060 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:25 crc kubenswrapper[4876]: I1215 08:20:25.466057 4876 generic.go:334] "Generic (PLEG): container finished" podID="872cb342-cad7-4053-84e6-f92573320ef9" containerID="a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c" exitCode=0 Dec 15 08:20:25 crc kubenswrapper[4876]: I1215 08:20:25.466122 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerDied","Data":"a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c"} Dec 15 08:20:25 crc kubenswrapper[4876]: I1215 08:20:25.466152 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerStarted","Data":"808c22e4adfc9a0f928f3c03780922d283d2e3e0cd71f20044352454f37b9aab"} Dec 15 08:20:25 crc kubenswrapper[4876]: I1215 08:20:25.467710 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:20:27 crc kubenswrapper[4876]: I1215 08:20:27.493024 4876 generic.go:334] "Generic (PLEG): container finished" podID="872cb342-cad7-4053-84e6-f92573320ef9" containerID="198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1" exitCode=0 Dec 15 08:20:27 crc kubenswrapper[4876]: I1215 08:20:27.493548 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerDied","Data":"198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1"} Dec 15 08:20:28 crc kubenswrapper[4876]: I1215 08:20:28.501382 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerStarted","Data":"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e"} Dec 15 08:20:28 crc kubenswrapper[4876]: I1215 08:20:28.528280 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7hvhn" podStartSLOduration=1.912951168 podStartE2EDuration="4.528225096s" podCreationTimestamp="2025-12-15 08:20:24 +0000 UTC" firstStartedPulling="2025-12-15 
08:20:25.467425 +0000 UTC m=+5351.038567921" lastFinishedPulling="2025-12-15 08:20:28.082698948 +0000 UTC m=+5353.653841849" observedRunningTime="2025-12-15 08:20:28.520580531 +0000 UTC m=+5354.091723482" watchObservedRunningTime="2025-12-15 08:20:28.528225096 +0000 UTC m=+5354.099368007" Dec 15 08:20:34 crc kubenswrapper[4876]: I1215 08:20:34.715323 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:34 crc kubenswrapper[4876]: I1215 08:20:34.715889 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:34 crc kubenswrapper[4876]: I1215 08:20:34.754647 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:35 crc kubenswrapper[4876]: I1215 08:20:35.596071 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:35 crc kubenswrapper[4876]: I1215 08:20:35.643832 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:37 crc kubenswrapper[4876]: I1215 08:20:37.559734 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7hvhn" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="registry-server" containerID="cri-o://3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e" gracePeriod=2 Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.505720 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.572848 4876 generic.go:334] "Generic (PLEG): container finished" podID="872cb342-cad7-4053-84e6-f92573320ef9" containerID="3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e" exitCode=0 Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.572912 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7hvhn" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.572907 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerDied","Data":"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e"} Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.572977 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7hvhn" event={"ID":"872cb342-cad7-4053-84e6-f92573320ef9","Type":"ContainerDied","Data":"808c22e4adfc9a0f928f3c03780922d283d2e3e0cd71f20044352454f37b9aab"} Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.572999 4876 scope.go:117] "RemoveContainer" containerID="3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.593125 4876 scope.go:117] "RemoveContainer" containerID="198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.609883 4876 scope.go:117] "RemoveContainer" containerID="a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.618691 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content\") pod \"872cb342-cad7-4053-84e6-f92573320ef9\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.618754 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities\") pod \"872cb342-cad7-4053-84e6-f92573320ef9\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.618871 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw9jc\" (UniqueName: \"kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc\") pod \"872cb342-cad7-4053-84e6-f92573320ef9\" (UID: \"872cb342-cad7-4053-84e6-f92573320ef9\") " Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.619816 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities" (OuterVolumeSpecName: "utilities") pod "872cb342-cad7-4053-84e6-f92573320ef9" (UID: "872cb342-cad7-4053-84e6-f92573320ef9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.625296 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc" (OuterVolumeSpecName: "kube-api-access-zw9jc") pod "872cb342-cad7-4053-84e6-f92573320ef9" (UID: "872cb342-cad7-4053-84e6-f92573320ef9"). InnerVolumeSpecName "kube-api-access-zw9jc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.637579 4876 scope.go:117] "RemoveContainer" containerID="3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e" Dec 15 08:20:38 crc kubenswrapper[4876]: E1215 08:20:38.638059 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e\": container with ID starting with 3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e not found: ID does not exist" containerID="3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.638185 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e"} err="failed to get container status \"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e\": rpc error: code = NotFound desc = could not find container \"3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e\": container with ID starting with 3a06b9e3283611e872a8d1835d90cde929ee062f5e160339910a21e6b3bbc05e not found: ID does not exist" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.638221 4876 scope.go:117] "RemoveContainer" containerID="198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1" Dec 15 08:20:38 crc kubenswrapper[4876]: E1215 08:20:38.638732 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1\": container with ID starting with 198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1 not found: ID does not exist" containerID="198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.638787 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1"} err="failed to get container status \"198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1\": rpc error: code = NotFound desc = could not find container \"198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1\": container with ID starting with 198e75b6b0b2d60fb06280bd2148cb1866553e09332ffebe162f29d9a96408a1 not found: ID does not exist" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.638823 4876 scope.go:117] "RemoveContainer" containerID="a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c" Dec 15 08:20:38 crc kubenswrapper[4876]: E1215 08:20:38.639165 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c\": container with ID starting with a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c not found: ID does not exist" containerID="a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.639195 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c"} err="failed to get container status \"a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c\": rpc error: code = NotFound desc = could not 
find container \"a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c\": container with ID starting with a8cd119d8911830df76b32ffd0b000c3efeecabbf68a4f713ae6a618ae1af82c not found: ID does not exist" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.642212 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "872cb342-cad7-4053-84e6-f92573320ef9" (UID: "872cb342-cad7-4053-84e6-f92573320ef9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.721039 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.721078 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/872cb342-cad7-4053-84e6-f92573320ef9-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.721133 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw9jc\" (UniqueName: \"kubernetes.io/projected/872cb342-cad7-4053-84e6-f92573320ef9-kube-api-access-zw9jc\") on node \"crc\" DevicePath \"\"" Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.901172 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:38 crc kubenswrapper[4876]: I1215 08:20:38.908872 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7hvhn"] Dec 15 08:20:40 crc kubenswrapper[4876]: I1215 08:20:40.714739 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="872cb342-cad7-4053-84e6-f92573320ef9" path="/var/lib/kubelet/pods/872cb342-cad7-4053-84e6-f92573320ef9/volumes" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.057061 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:24 crc kubenswrapper[4876]: E1215 08:21:24.058856 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="registry-server" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.058986 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="registry-server" Dec 15 08:21:24 crc kubenswrapper[4876]: E1215 08:21:24.059064 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="extract-utilities" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.059159 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="extract-utilities" Dec 15 08:21:24 crc kubenswrapper[4876]: E1215 08:21:24.059236 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="extract-content" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.059296 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="extract-content" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.059481 4876 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="872cb342-cad7-4053-84e6-f92573320ef9" containerName="registry-server" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.060641 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.078306 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.167290 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhf7s\" (UniqueName: \"kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.167379 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.167528 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.268288 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.268393 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.268440 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhf7s\" (UniqueName: \"kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.268945 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.268955 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities\") pod \"certified-operators-qt59k\" (UID: 
\"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.288870 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhf7s\" (UniqueName: \"kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s\") pod \"certified-operators-qt59k\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.379937 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.880077 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:24 crc kubenswrapper[4876]: I1215 08:21:24.887652 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerStarted","Data":"b0a84a28128eb565acdfba176b325133ac61b7a732fa3d16434d0b0bead81583"} Dec 15 08:21:25 crc kubenswrapper[4876]: I1215 08:21:25.897088 4876 generic.go:334] "Generic (PLEG): container finished" podID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerID="f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a" exitCode=0 Dec 15 08:21:25 crc kubenswrapper[4876]: I1215 08:21:25.897234 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerDied","Data":"f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a"} Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.322345 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.322742 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.465726 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.467095 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.478019 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.624600 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hsqq\" (UniqueName: \"kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.624762 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.624839 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.726591 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.726678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.726734 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hsqq\" (UniqueName: \"kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.727174 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.727291 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.746737 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2hsqq\" (UniqueName: \"kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq\") pod \"redhat-operators-xlznd\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.796389 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.924196 4876 generic.go:334] "Generic (PLEG): container finished" podID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerID="56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535" exitCode=0 Dec 15 08:21:27 crc kubenswrapper[4876]: I1215 08:21:27.924241 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerDied","Data":"56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535"} Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.042956 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.932259 4876 generic.go:334] "Generic (PLEG): container finished" podID="28839bf7-6768-4c44-862e-d29281303b49" containerID="ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956" exitCode=0 Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.932375 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerDied","Data":"ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956"} Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.932590 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerStarted","Data":"19e7b4077043470c051bbbd0a349a56aa5ccbc8f0c7be4fc6bb65ea8a1c2e8ae"} Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.934689 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerStarted","Data":"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65"} Dec 15 08:21:28 crc kubenswrapper[4876]: I1215 08:21:28.983315 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qt59k" podStartSLOduration=2.533919728 podStartE2EDuration="4.983297641s" podCreationTimestamp="2025-12-15 08:21:24 +0000 UTC" firstStartedPulling="2025-12-15 08:21:25.899347441 +0000 UTC m=+5411.470490352" lastFinishedPulling="2025-12-15 08:21:28.348725354 +0000 UTC m=+5413.919868265" observedRunningTime="2025-12-15 08:21:28.97841142 +0000 UTC m=+5414.549554331" watchObservedRunningTime="2025-12-15 08:21:28.983297641 +0000 UTC m=+5414.554440552" Dec 15 08:21:30 crc kubenswrapper[4876]: I1215 08:21:30.951189 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerStarted","Data":"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b"} Dec 15 08:21:31 crc kubenswrapper[4876]: I1215 08:21:31.959173 4876 generic.go:334] "Generic (PLEG): container finished" podID="28839bf7-6768-4c44-862e-d29281303b49" 
containerID="2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b" exitCode=0 Dec 15 08:21:31 crc kubenswrapper[4876]: I1215 08:21:31.959225 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerDied","Data":"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b"} Dec 15 08:21:33 crc kubenswrapper[4876]: I1215 08:21:33.977763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerStarted","Data":"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43"} Dec 15 08:21:33 crc kubenswrapper[4876]: I1215 08:21:33.997035 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xlznd" podStartSLOduration=3.046875232 podStartE2EDuration="6.997007228s" podCreationTimestamp="2025-12-15 08:21:27 +0000 UTC" firstStartedPulling="2025-12-15 08:21:28.934047523 +0000 UTC m=+5414.505190434" lastFinishedPulling="2025-12-15 08:21:32.884179519 +0000 UTC m=+5418.455322430" observedRunningTime="2025-12-15 08:21:33.992823705 +0000 UTC m=+5419.563966606" watchObservedRunningTime="2025-12-15 08:21:33.997007228 +0000 UTC m=+5419.568150149" Dec 15 08:21:34 crc kubenswrapper[4876]: I1215 08:21:34.381777 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:34 crc kubenswrapper[4876]: I1215 08:21:34.381871 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:34 crc kubenswrapper[4876]: I1215 08:21:34.441133 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:35 crc kubenswrapper[4876]: I1215 08:21:35.021980 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:35 crc kubenswrapper[4876]: I1215 08:21:35.646609 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:36 crc kubenswrapper[4876]: I1215 08:21:36.994283 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qt59k" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="registry-server" containerID="cri-o://cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65" gracePeriod=2 Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.422751 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.565781 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhf7s\" (UniqueName: \"kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s\") pod \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.565886 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities\") pod \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.565983 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content\") pod \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\" (UID: \"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f\") " Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.567156 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities" (OuterVolumeSpecName: "utilities") pod "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" (UID: "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.577607 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s" (OuterVolumeSpecName: "kube-api-access-jhf7s") pod "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" (UID: "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f"). InnerVolumeSpecName "kube-api-access-jhf7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.668136 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhf7s\" (UniqueName: \"kubernetes.io/projected/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-kube-api-access-jhf7s\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.668177 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.762080 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" (UID: "89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.769873 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.797260 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:37 crc kubenswrapper[4876]: I1215 08:21:37.797322 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.005222 4876 generic.go:334] "Generic (PLEG): container finished" podID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerID="cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65" exitCode=0 Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.005274 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerDied","Data":"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65"} Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.005303 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qt59k" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.005344 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qt59k" event={"ID":"89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f","Type":"ContainerDied","Data":"b0a84a28128eb565acdfba176b325133ac61b7a732fa3d16434d0b0bead81583"} Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.005366 4876 scope.go:117] "RemoveContainer" containerID="cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.041417 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.042405 4876 scope.go:117] "RemoveContainer" containerID="56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.047731 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qt59k"] Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.061464 4876 scope.go:117] "RemoveContainer" containerID="f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.087470 4876 scope.go:117] "RemoveContainer" containerID="cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65" Dec 15 08:21:38 crc kubenswrapper[4876]: E1215 08:21:38.087941 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65\": container with ID starting with cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65 not found: ID does not exist" containerID="cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.087988 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65"} 
err="failed to get container status \"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65\": rpc error: code = NotFound desc = could not find container \"cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65\": container with ID starting with cd790ca0bc4de5228816b3337cb9ecafc95b070f4c0de40b6373b8245f4b1e65 not found: ID does not exist" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.088018 4876 scope.go:117] "RemoveContainer" containerID="56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535" Dec 15 08:21:38 crc kubenswrapper[4876]: E1215 08:21:38.088341 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535\": container with ID starting with 56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535 not found: ID does not exist" containerID="56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.088367 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535"} err="failed to get container status \"56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535\": rpc error: code = NotFound desc = could not find container \"56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535\": container with ID starting with 56aafd340e7d9b10d8bb19e696e17b67903d31712c4c68f007fccd11fb75d535 not found: ID does not exist" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.088381 4876 scope.go:117] "RemoveContainer" containerID="f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a" Dec 15 08:21:38 crc kubenswrapper[4876]: E1215 08:21:38.088607 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a\": container with ID starting with f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a not found: ID does not exist" containerID="f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.088631 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a"} err="failed to get container status \"f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a\": rpc error: code = NotFound desc = could not find container \"f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a\": container with ID starting with f8ac9d76daa881e8ada583390097f8b444f4fa42e42805f2ed4935cd89b59c4a not found: ID does not exist" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.715024 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" path="/var/lib/kubelet/pods/89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f/volumes" Dec 15 08:21:38 crc kubenswrapper[4876]: I1215 08:21:38.845513 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xlznd" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="registry-server" probeResult="failure" output=< Dec 15 08:21:38 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 08:21:38 crc kubenswrapper[4876]: > Dec 15 08:21:47 crc kubenswrapper[4876]: I1215 
08:21:47.840338 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:47 crc kubenswrapper[4876]: I1215 08:21:47.881934 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:48 crc kubenswrapper[4876]: I1215 08:21:48.073568 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.098798 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xlznd" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="registry-server" containerID="cri-o://f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43" gracePeriod=2 Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.508315 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.630156 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content\") pod \"28839bf7-6768-4c44-862e-d29281303b49\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.630335 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities\") pod \"28839bf7-6768-4c44-862e-d29281303b49\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.630373 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hsqq\" (UniqueName: \"kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq\") pod \"28839bf7-6768-4c44-862e-d29281303b49\" (UID: \"28839bf7-6768-4c44-862e-d29281303b49\") " Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.631990 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities" (OuterVolumeSpecName: "utilities") pod "28839bf7-6768-4c44-862e-d29281303b49" (UID: "28839bf7-6768-4c44-862e-d29281303b49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.635831 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq" (OuterVolumeSpecName: "kube-api-access-2hsqq") pod "28839bf7-6768-4c44-862e-d29281303b49" (UID: "28839bf7-6768-4c44-862e-d29281303b49"). InnerVolumeSpecName "kube-api-access-2hsqq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.732254 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.732505 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hsqq\" (UniqueName: \"kubernetes.io/projected/28839bf7-6768-4c44-862e-d29281303b49-kube-api-access-2hsqq\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.762402 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28839bf7-6768-4c44-862e-d29281303b49" (UID: "28839bf7-6768-4c44-862e-d29281303b49"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:21:49 crc kubenswrapper[4876]: I1215 08:21:49.833405 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28839bf7-6768-4c44-862e-d29281303b49-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.106917 4876 generic.go:334] "Generic (PLEG): container finished" podID="28839bf7-6768-4c44-862e-d29281303b49" containerID="f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43" exitCode=0 Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.106960 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xlznd" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.106969 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerDied","Data":"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43"} Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.107005 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xlznd" event={"ID":"28839bf7-6768-4c44-862e-d29281303b49","Type":"ContainerDied","Data":"19e7b4077043470c051bbbd0a349a56aa5ccbc8f0c7be4fc6bb65ea8a1c2e8ae"} Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.107025 4876 scope.go:117] "RemoveContainer" containerID="f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.126221 4876 scope.go:117] "RemoveContainer" containerID="2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.142478 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.150017 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xlznd"] Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.159276 4876 scope.go:117] "RemoveContainer" containerID="ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.175750 4876 scope.go:117] "RemoveContainer" containerID="f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43" Dec 15 08:21:50 crc kubenswrapper[4876]: E1215 08:21:50.176226 4876 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43\": container with ID starting with f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43 not found: ID does not exist" containerID="f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.176266 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43"} err="failed to get container status \"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43\": rpc error: code = NotFound desc = could not find container \"f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43\": container with ID starting with f163339f6156fbd63acd4a0f5f946d6cbe7259241ede2f19475132c6fcfbce43 not found: ID does not exist" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.176291 4876 scope.go:117] "RemoveContainer" containerID="2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b" Dec 15 08:21:50 crc kubenswrapper[4876]: E1215 08:21:50.176747 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b\": container with ID starting with 2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b not found: ID does not exist" containerID="2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.176790 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b"} err="failed to get container status \"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b\": rpc error: code = NotFound desc = could not find container \"2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b\": container with ID starting with 2245b47343467ff881788accd40278c2069c8fa510ffdec56eb0f841a0b0cb6b not found: ID does not exist" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.176817 4876 scope.go:117] "RemoveContainer" containerID="ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956" Dec 15 08:21:50 crc kubenswrapper[4876]: E1215 08:21:50.177137 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956\": container with ID starting with ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956 not found: ID does not exist" containerID="ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.177159 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956"} err="failed to get container status \"ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956\": rpc error: code = NotFound desc = could not find container \"ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956\": container with ID starting with ba5971df2f0ccc384be1d6960bf821551a22da17a9960d6b19734ecac7f9d956 not found: ID does not exist" Dec 15 08:21:50 crc kubenswrapper[4876]: I1215 08:21:50.716643 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="28839bf7-6768-4c44-862e-d29281303b49" path="/var/lib/kubelet/pods/28839bf7-6768-4c44-862e-d29281303b49/volumes" Dec 15 08:21:57 crc kubenswrapper[4876]: I1215 08:21:57.322495 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:21:57 crc kubenswrapper[4876]: I1215 08:21:57.323057 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:22:27 crc kubenswrapper[4876]: I1215 08:22:27.322986 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:22:27 crc kubenswrapper[4876]: I1215 08:22:27.323517 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:22:27 crc kubenswrapper[4876]: I1215 08:22:27.323570 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:22:27 crc kubenswrapper[4876]: I1215 08:22:27.324279 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:22:27 crc kubenswrapper[4876]: I1215 08:22:27.324341 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da" gracePeriod=600 Dec 15 08:22:28 crc kubenswrapper[4876]: I1215 08:22:28.383094 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da" exitCode=0 Dec 15 08:22:28 crc kubenswrapper[4876]: I1215 08:22:28.383180 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da"} Dec 15 08:22:28 crc kubenswrapper[4876]: I1215 08:22:28.383431 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78"} Dec 15 08:22:28 crc kubenswrapper[4876]: I1215 08:22:28.383452 4876 scope.go:117] "RemoveContainer" containerID="98144ffd20dde0ea000f848c259a15dc92f37760d3e4ed6576e1be727a98c5cb" Dec 15 08:24:27 crc kubenswrapper[4876]: I1215 08:24:27.322944 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:24:27 crc kubenswrapper[4876]: I1215 08:24:27.323555 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:24:57 crc kubenswrapper[4876]: I1215 08:24:57.323188 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:24:57 crc kubenswrapper[4876]: I1215 08:24:57.323852 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:25:27 crc kubenswrapper[4876]: I1215 08:25:27.323240 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:25:27 crc kubenswrapper[4876]: I1215 08:25:27.323831 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:25:27 crc kubenswrapper[4876]: I1215 08:25:27.323881 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:25:27 crc kubenswrapper[4876]: I1215 08:25:27.324568 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:25:27 crc kubenswrapper[4876]: I1215 08:25:27.324621 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" 
containerID="cri-o://e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" gracePeriod=600 Dec 15 08:25:27 crc kubenswrapper[4876]: E1215 08:25:27.449887 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:25:28 crc kubenswrapper[4876]: I1215 08:25:28.030434 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" exitCode=0 Dec 15 08:25:28 crc kubenswrapper[4876]: I1215 08:25:28.030503 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78"} Dec 15 08:25:28 crc kubenswrapper[4876]: I1215 08:25:28.030792 4876 scope.go:117] "RemoveContainer" containerID="45133dc5d533de364371d0fb936eb97fa02805ba5042019bd072892b716669da" Dec 15 08:25:28 crc kubenswrapper[4876]: I1215 08:25:28.031442 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:25:28 crc kubenswrapper[4876]: E1215 08:25:28.031786 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:25:42 crc kubenswrapper[4876]: I1215 08:25:42.705847 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:25:42 crc kubenswrapper[4876]: E1215 08:25:42.706731 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.873934 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874827 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="extract-utilities" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874843 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="extract-utilities" Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874856 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="extract-content" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874863 4876 
state_mem.go:107] "Deleted CPUSet assignment" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="extract-content" Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874875 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="extract-content" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874882 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="extract-content" Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874890 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874896 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874909 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="extract-utilities" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874915 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="extract-utilities" Dec 15 08:25:53 crc kubenswrapper[4876]: E1215 08:25:53.874928 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.874935 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.875056 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="28839bf7-6768-4c44-862e-d29281303b49" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.875074 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="89f0e4f4-3b54-46ce-bf6b-7c8443a84a5f" containerName="registry-server" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.876464 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:53 crc kubenswrapper[4876]: I1215 08:25:53.899589 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.036442 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.036494 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.036527 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmf2k\" (UniqueName: \"kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.137437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.137512 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.137541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmf2k\" (UniqueName: \"kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.138041 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.138324 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.166551 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pmf2k\" (UniqueName: \"kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k\") pod \"community-operators-6jgrw\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.198416 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:25:54 crc kubenswrapper[4876]: W1215 08:25:54.690522 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09255af3_7a3f_4b15_b6f2_f4775caf967c.slice/crio-67656e9f60b9ac1a5f6c29cc302a644d1916a8373069a32f3c412ca36eb26667 WatchSource:0}: Error finding container 67656e9f60b9ac1a5f6c29cc302a644d1916a8373069a32f3c412ca36eb26667: Status 404 returned error can't find the container with id 67656e9f60b9ac1a5f6c29cc302a644d1916a8373069a32f3c412ca36eb26667 Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.693737 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:25:54 crc kubenswrapper[4876]: I1215 08:25:54.710494 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:25:54 crc kubenswrapper[4876]: E1215 08:25:54.710760 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:25:55 crc kubenswrapper[4876]: I1215 08:25:55.211859 4876 generic.go:334] "Generic (PLEG): container finished" podID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerID="239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea" exitCode=0 Dec 15 08:25:55 crc kubenswrapper[4876]: I1215 08:25:55.211902 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerDied","Data":"239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea"} Dec 15 08:25:55 crc kubenswrapper[4876]: I1215 08:25:55.211925 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerStarted","Data":"67656e9f60b9ac1a5f6c29cc302a644d1916a8373069a32f3c412ca36eb26667"} Dec 15 08:25:55 crc kubenswrapper[4876]: I1215 08:25:55.213866 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:25:57 crc kubenswrapper[4876]: I1215 08:25:57.227861 4876 generic.go:334] "Generic (PLEG): container finished" podID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerID="5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b" exitCode=0 Dec 15 08:25:57 crc kubenswrapper[4876]: I1215 08:25:57.227958 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerDied","Data":"5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b"} Dec 15 08:25:58 crc 
kubenswrapper[4876]: I1215 08:25:58.235587 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerStarted","Data":"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4"} Dec 15 08:25:58 crc kubenswrapper[4876]: I1215 08:25:58.255636 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6jgrw" podStartSLOduration=2.817301327 podStartE2EDuration="5.255614729s" podCreationTimestamp="2025-12-15 08:25:53 +0000 UTC" firstStartedPulling="2025-12-15 08:25:55.213539338 +0000 UTC m=+5680.784682249" lastFinishedPulling="2025-12-15 08:25:57.65185274 +0000 UTC m=+5683.222995651" observedRunningTime="2025-12-15 08:25:58.255289999 +0000 UTC m=+5683.826432920" watchObservedRunningTime="2025-12-15 08:25:58.255614729 +0000 UTC m=+5683.826757660" Dec 15 08:26:04 crc kubenswrapper[4876]: I1215 08:26:04.198940 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:04 crc kubenswrapper[4876]: I1215 08:26:04.199597 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:04 crc kubenswrapper[4876]: I1215 08:26:04.246848 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:04 crc kubenswrapper[4876]: I1215 08:26:04.329294 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:04 crc kubenswrapper[4876]: I1215 08:26:04.484357 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:26:05 crc kubenswrapper[4876]: I1215 08:26:05.705721 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:26:05 crc kubenswrapper[4876]: E1215 08:26:05.706234 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:26:06 crc kubenswrapper[4876]: I1215 08:26:06.288259 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6jgrw" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="registry-server" containerID="cri-o://aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4" gracePeriod=2 Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.284846 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.297403 4876 generic.go:334] "Generic (PLEG): container finished" podID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerID="aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4" exitCode=0 Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.297483 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6jgrw" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.297468 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerDied","Data":"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4"} Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.297561 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6jgrw" event={"ID":"09255af3-7a3f-4b15-b6f2-f4775caf967c","Type":"ContainerDied","Data":"67656e9f60b9ac1a5f6c29cc302a644d1916a8373069a32f3c412ca36eb26667"} Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.297601 4876 scope.go:117] "RemoveContainer" containerID="aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.320405 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmf2k\" (UniqueName: \"kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k\") pod \"09255af3-7a3f-4b15-b6f2-f4775caf967c\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.320450 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities\") pod \"09255af3-7a3f-4b15-b6f2-f4775caf967c\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.320540 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content\") pod \"09255af3-7a3f-4b15-b6f2-f4775caf967c\" (UID: \"09255af3-7a3f-4b15-b6f2-f4775caf967c\") " Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.321435 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities" (OuterVolumeSpecName: "utilities") pod "09255af3-7a3f-4b15-b6f2-f4775caf967c" (UID: "09255af3-7a3f-4b15-b6f2-f4775caf967c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.326457 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k" (OuterVolumeSpecName: "kube-api-access-pmf2k") pod "09255af3-7a3f-4b15-b6f2-f4775caf967c" (UID: "09255af3-7a3f-4b15-b6f2-f4775caf967c"). InnerVolumeSpecName "kube-api-access-pmf2k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.336247 4876 scope.go:117] "RemoveContainer" containerID="5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.358963 4876 scope.go:117] "RemoveContainer" containerID="239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.381052 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09255af3-7a3f-4b15-b6f2-f4775caf967c" (UID: "09255af3-7a3f-4b15-b6f2-f4775caf967c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.387972 4876 scope.go:117] "RemoveContainer" containerID="aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4" Dec 15 08:26:07 crc kubenswrapper[4876]: E1215 08:26:07.391621 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4\": container with ID starting with aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4 not found: ID does not exist" containerID="aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.391651 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4"} err="failed to get container status \"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4\": rpc error: code = NotFound desc = could not find container \"aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4\": container with ID starting with aa2083d22f39407dd7e25e60e6035af296893f39e2909d6e83be77cdfef8dfe4 not found: ID does not exist" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.391672 4876 scope.go:117] "RemoveContainer" containerID="5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b" Dec 15 08:26:07 crc kubenswrapper[4876]: E1215 08:26:07.392310 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b\": container with ID starting with 5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b not found: ID does not exist" containerID="5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.392344 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b"} err="failed to get container status \"5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b\": rpc error: code = NotFound desc = could not find container \"5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b\": container with ID starting with 5022b0a2c70daca1c58c9682479a33be4574cd58727124fae2d447cccfe57d9b not found: ID does not exist" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.392368 4876 scope.go:117] "RemoveContainer" containerID="239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea" Dec 15 08:26:07 crc kubenswrapper[4876]: 
E1215 08:26:07.392947 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea\": container with ID starting with 239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea not found: ID does not exist" containerID="239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.392971 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea"} err="failed to get container status \"239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea\": rpc error: code = NotFound desc = could not find container \"239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea\": container with ID starting with 239d2caff11fadc850dfa6f139c2708bfa98e4e1118989c1e48c68598881f9ea not found: ID does not exist" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.421966 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmf2k\" (UniqueName: \"kubernetes.io/projected/09255af3-7a3f-4b15-b6f2-f4775caf967c-kube-api-access-pmf2k\") on node \"crc\" DevicePath \"\"" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.421998 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.422011 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09255af3-7a3f-4b15-b6f2-f4775caf967c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.632635 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:26:07 crc kubenswrapper[4876]: I1215 08:26:07.640028 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6jgrw"] Dec 15 08:26:08 crc kubenswrapper[4876]: I1215 08:26:08.714005 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" path="/var/lib/kubelet/pods/09255af3-7a3f-4b15-b6f2-f4775caf967c/volumes" Dec 15 08:26:18 crc kubenswrapper[4876]: I1215 08:26:18.705763 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:26:18 crc kubenswrapper[4876]: E1215 08:26:18.706665 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:26:32 crc kubenswrapper[4876]: I1215 08:26:32.705450 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:26:32 crc kubenswrapper[4876]: E1215 08:26:32.706558 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:26:43 crc kubenswrapper[4876]: I1215 08:26:43.705540 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:26:43 crc kubenswrapper[4876]: E1215 08:26:43.706971 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:26:54 crc kubenswrapper[4876]: I1215 08:26:54.710067 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:26:54 crc kubenswrapper[4876]: E1215 08:26:54.712444 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:27:09 crc kubenswrapper[4876]: I1215 08:27:09.705789 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:27:09 crc kubenswrapper[4876]: E1215 08:27:09.706661 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:27:21 crc kubenswrapper[4876]: I1215 08:27:21.705260 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:27:21 crc kubenswrapper[4876]: E1215 08:27:21.706022 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:27:36 crc kubenswrapper[4876]: I1215 08:27:36.705805 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:27:36 crc kubenswrapper[4876]: E1215 08:27:36.706632 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:27:50 crc kubenswrapper[4876]: I1215 08:27:50.705063 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:27:50 crc kubenswrapper[4876]: E1215 08:27:50.705897 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:28:04 crc kubenswrapper[4876]: I1215 08:28:04.709120 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:28:04 crc kubenswrapper[4876]: E1215 08:28:04.709813 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:28:18 crc kubenswrapper[4876]: I1215 08:28:18.705219 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:28:18 crc kubenswrapper[4876]: E1215 08:28:18.705958 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:28:26 crc kubenswrapper[4876]: I1215 08:28:26.922281 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-cccw6"] Dec 15 08:28:26 crc kubenswrapper[4876]: I1215 08:28:26.928174 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-cccw6"] Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.040244 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-kqqkx"] Dec 15 08:28:27 crc kubenswrapper[4876]: E1215 08:28:27.040653 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="extract-content" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.040680 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="extract-content" Dec 15 08:28:27 crc kubenswrapper[4876]: E1215 08:28:27.040699 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="registry-server" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.040708 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="registry-server" Dec 15 08:28:27 crc kubenswrapper[4876]: E1215 08:28:27.040726 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="extract-utilities" Dec 15 08:28:27 
crc kubenswrapper[4876]: I1215 08:28:27.040736 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="extract-utilities" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.041074 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="09255af3-7a3f-4b15-b6f2-f4775caf967c" containerName="registry-server" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.041632 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.043597 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.043597 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.043758 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.045339 4876 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-gjdrn" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.050696 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kqqkx"] Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.115673 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m5xp\" (UniqueName: \"kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.116013 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.116203 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.217658 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.217791 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.217824 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m5xp\" (UniqueName: 
\"kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.217905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.218689 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.243022 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m5xp\" (UniqueName: \"kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp\") pod \"crc-storage-crc-kqqkx\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.361196 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:27 crc kubenswrapper[4876]: I1215 08:28:27.761066 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kqqkx"] Dec 15 08:28:28 crc kubenswrapper[4876]: I1215 08:28:28.497449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kqqkx" event={"ID":"6462dc04-b58d-45e6-b45b-6196845f9e90","Type":"ContainerStarted","Data":"3ef889875baf3c5e5f13e66d64e37c52fe49c1fa6d76cf030674aab6e94984a8"} Dec 15 08:28:28 crc kubenswrapper[4876]: I1215 08:28:28.716734 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ebf5f89-26e8-4ee0-9de4-2c218574e6df" path="/var/lib/kubelet/pods/1ebf5f89-26e8-4ee0-9de4-2c218574e6df/volumes" Dec 15 08:28:29 crc kubenswrapper[4876]: I1215 08:28:29.506226 4876 generic.go:334] "Generic (PLEG): container finished" podID="6462dc04-b58d-45e6-b45b-6196845f9e90" containerID="7926936bc9ba6368a30dd972ab4e1b4c63238bf7c0b05e7328d0258c104e6253" exitCode=0 Dec 15 08:28:29 crc kubenswrapper[4876]: I1215 08:28:29.506280 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kqqkx" event={"ID":"6462dc04-b58d-45e6-b45b-6196845f9e90","Type":"ContainerDied","Data":"7926936bc9ba6368a30dd972ab4e1b4c63238bf7c0b05e7328d0258c104e6253"} Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.704945 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:28:30 crc kubenswrapper[4876]: E1215 08:28:30.705190 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.768758 4876 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.871401 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt\") pod \"6462dc04-b58d-45e6-b45b-6196845f9e90\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.871573 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "6462dc04-b58d-45e6-b45b-6196845f9e90" (UID: "6462dc04-b58d-45e6-b45b-6196845f9e90"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.872377 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m5xp\" (UniqueName: \"kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp\") pod \"6462dc04-b58d-45e6-b45b-6196845f9e90\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.872623 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage\") pod \"6462dc04-b58d-45e6-b45b-6196845f9e90\" (UID: \"6462dc04-b58d-45e6-b45b-6196845f9e90\") " Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.873157 4876 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6462dc04-b58d-45e6-b45b-6196845f9e90-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.877692 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp" (OuterVolumeSpecName: "kube-api-access-2m5xp") pod "6462dc04-b58d-45e6-b45b-6196845f9e90" (UID: "6462dc04-b58d-45e6-b45b-6196845f9e90"). InnerVolumeSpecName "kube-api-access-2m5xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.890808 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "6462dc04-b58d-45e6-b45b-6196845f9e90" (UID: "6462dc04-b58d-45e6-b45b-6196845f9e90"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.974192 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m5xp\" (UniqueName: \"kubernetes.io/projected/6462dc04-b58d-45e6-b45b-6196845f9e90-kube-api-access-2m5xp\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:30 crc kubenswrapper[4876]: I1215 08:28:30.974237 4876 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6462dc04-b58d-45e6-b45b-6196845f9e90-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:31 crc kubenswrapper[4876]: I1215 08:28:31.524383 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kqqkx" event={"ID":"6462dc04-b58d-45e6-b45b-6196845f9e90","Type":"ContainerDied","Data":"3ef889875baf3c5e5f13e66d64e37c52fe49c1fa6d76cf030674aab6e94984a8"} Dec 15 08:28:31 crc kubenswrapper[4876]: I1215 08:28:31.524431 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ef889875baf3c5e5f13e66d64e37c52fe49c1fa6d76cf030674aab6e94984a8" Dec 15 08:28:31 crc kubenswrapper[4876]: I1215 08:28:31.524489 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kqqkx" Dec 15 08:28:32 crc kubenswrapper[4876]: I1215 08:28:32.904396 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-kqqkx"] Dec 15 08:28:32 crc kubenswrapper[4876]: I1215 08:28:32.909516 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-kqqkx"] Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.019983 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-44tsf"] Dec 15 08:28:33 crc kubenswrapper[4876]: E1215 08:28:33.020387 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6462dc04-b58d-45e6-b45b-6196845f9e90" containerName="storage" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.020408 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6462dc04-b58d-45e6-b45b-6196845f9e90" containerName="storage" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.020582 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6462dc04-b58d-45e6-b45b-6196845f9e90" containerName="storage" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.021041 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.022734 4876 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-gjdrn" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.023070 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.023233 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.023525 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.028219 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-44tsf"] Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.199263 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vns62\" (UniqueName: \"kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.199352 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.199445 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.301605 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vns62\" (UniqueName: \"kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.301715 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.301753 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.302062 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " 
pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.302560 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.319360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vns62\" (UniqueName: \"kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62\") pod \"crc-storage-crc-44tsf\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.344660 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:33 crc kubenswrapper[4876]: I1215 08:28:33.755280 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-44tsf"] Dec 15 08:28:34 crc kubenswrapper[4876]: I1215 08:28:34.545730 4876 generic.go:334] "Generic (PLEG): container finished" podID="aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" containerID="ced9a0ac33ae4055568d0e9ef6f4cb80b37f7f5d55a471ae19e5cec6421d6f1f" exitCode=0 Dec 15 08:28:34 crc kubenswrapper[4876]: I1215 08:28:34.546000 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-44tsf" event={"ID":"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8","Type":"ContainerDied","Data":"ced9a0ac33ae4055568d0e9ef6f4cb80b37f7f5d55a471ae19e5cec6421d6f1f"} Dec 15 08:28:34 crc kubenswrapper[4876]: I1215 08:28:34.546026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-44tsf" event={"ID":"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8","Type":"ContainerStarted","Data":"b4094b1fb918be725e4c22172692f34595aa9037ff114160478f69cca5a5a199"} Dec 15 08:28:34 crc kubenswrapper[4876]: I1215 08:28:34.717373 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6462dc04-b58d-45e6-b45b-6196845f9e90" path="/var/lib/kubelet/pods/6462dc04-b58d-45e6-b45b-6196845f9e90/volumes" Dec 15 08:28:35 crc kubenswrapper[4876]: I1215 08:28:35.869855 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.035455 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt\") pod \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.035518 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage\") pod \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.035555 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vns62\" (UniqueName: \"kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62\") pod \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\" (UID: \"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8\") " Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.036414 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" (UID: "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.040268 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62" (OuterVolumeSpecName: "kube-api-access-vns62") pod "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" (UID: "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8"). InnerVolumeSpecName "kube-api-access-vns62". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.052022 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" (UID: "aed8aea0-9fc8-4ca8-a95c-0299b8183ed8"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.137557 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vns62\" (UniqueName: \"kubernetes.io/projected/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-kube-api-access-vns62\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.137593 4876 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.137629 4876 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/aed8aea0-9fc8-4ca8-a95c-0299b8183ed8-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.562435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-44tsf" event={"ID":"aed8aea0-9fc8-4ca8-a95c-0299b8183ed8","Type":"ContainerDied","Data":"b4094b1fb918be725e4c22172692f34595aa9037ff114160478f69cca5a5a199"} Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.562482 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4094b1fb918be725e4c22172692f34595aa9037ff114160478f69cca5a5a199" Dec 15 08:28:36 crc kubenswrapper[4876]: I1215 08:28:36.562553 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-44tsf" Dec 15 08:28:42 crc kubenswrapper[4876]: I1215 08:28:42.705352 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:28:42 crc kubenswrapper[4876]: E1215 08:28:42.706343 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:28:54 crc kubenswrapper[4876]: I1215 08:28:54.709992 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:28:54 crc kubenswrapper[4876]: E1215 08:28:54.712966 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:29:09 crc kubenswrapper[4876]: I1215 08:29:09.705283 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:29:09 crc kubenswrapper[4876]: E1215 08:29:09.706167 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:29:22 crc kubenswrapper[4876]: I1215 08:29:22.399504 4876 scope.go:117] "RemoveContainer" containerID="1fc9f577b5556289762f33baba66a8d91aac233d49845b27d2f08f03dcfa104c" Dec 15 08:29:23 crc kubenswrapper[4876]: I1215 08:29:23.706047 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:29:23 crc kubenswrapper[4876]: E1215 08:29:23.706869 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:29:35 crc kubenswrapper[4876]: I1215 08:29:35.705656 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:29:35 crc kubenswrapper[4876]: E1215 08:29:35.706563 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:29:48 crc kubenswrapper[4876]: I1215 08:29:48.705753 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:29:48 crc kubenswrapper[4876]: E1215 08:29:48.706965 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:29:59 crc kubenswrapper[4876]: I1215 08:29:59.705711 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:29:59 crc kubenswrapper[4876]: E1215 08:29:59.706892 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.145852 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t"] Dec 15 08:30:00 crc kubenswrapper[4876]: E1215 08:30:00.146370 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" containerName="storage" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.146402 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" containerName="storage" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.146603 4876 
memory_manager.go:354] "RemoveStaleState removing state" podUID="aed8aea0-9fc8-4ca8-a95c-0299b8183ed8" containerName="storage" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.147296 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.149435 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.150087 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.159838 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t"] Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.245608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.245657 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrngl\" (UniqueName: \"kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.245742 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.346210 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.346269 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrngl\" (UniqueName: \"kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.346332 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 
08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.347261 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.352907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.368617 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrngl\" (UniqueName: \"kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl\") pod \"collect-profiles-29429790-nps5t\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.476766 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:00 crc kubenswrapper[4876]: I1215 08:30:00.875783 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t"] Dec 15 08:30:01 crc kubenswrapper[4876]: I1215 08:30:01.154937 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" event={"ID":"2637c57c-34ea-4a9a-860f-b8cc1b5adec4","Type":"ContainerStarted","Data":"9e973bff45584da78eb97c336fd6b6f895e5f9f9082691139256f10e86e05345"} Dec 15 08:30:01 crc kubenswrapper[4876]: I1215 08:30:01.154980 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" event={"ID":"2637c57c-34ea-4a9a-860f-b8cc1b5adec4","Type":"ContainerStarted","Data":"e3d5eb86029c5ca52a70b9510b6fe3c02055bb10c0816305927c69376ff49a45"} Dec 15 08:30:01 crc kubenswrapper[4876]: I1215 08:30:01.173936 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" podStartSLOduration=1.173915712 podStartE2EDuration="1.173915712s" podCreationTimestamp="2025-12-15 08:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:30:01.169859942 +0000 UTC m=+5926.741002853" watchObservedRunningTime="2025-12-15 08:30:01.173915712 +0000 UTC m=+5926.745058623" Dec 15 08:30:02 crc kubenswrapper[4876]: I1215 08:30:02.165262 4876 generic.go:334] "Generic (PLEG): container finished" podID="2637c57c-34ea-4a9a-860f-b8cc1b5adec4" containerID="9e973bff45584da78eb97c336fd6b6f895e5f9f9082691139256f10e86e05345" exitCode=0 Dec 15 08:30:02 crc kubenswrapper[4876]: I1215 08:30:02.165300 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" event={"ID":"2637c57c-34ea-4a9a-860f-b8cc1b5adec4","Type":"ContainerDied","Data":"9e973bff45584da78eb97c336fd6b6f895e5f9f9082691139256f10e86e05345"} Dec 15 08:30:03 
crc kubenswrapper[4876]: I1215 08:30:03.427165 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.494882 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume\") pod \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.510706 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2637c57c-34ea-4a9a-860f-b8cc1b5adec4" (UID: "2637c57c-34ea-4a9a-860f-b8cc1b5adec4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.596284 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrngl\" (UniqueName: \"kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl\") pod \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.596354 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume\") pod \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\" (UID: \"2637c57c-34ea-4a9a-860f-b8cc1b5adec4\") " Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.596717 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.597130 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume" (OuterVolumeSpecName: "config-volume") pod "2637c57c-34ea-4a9a-860f-b8cc1b5adec4" (UID: "2637c57c-34ea-4a9a-860f-b8cc1b5adec4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.606501 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl" (OuterVolumeSpecName: "kube-api-access-jrngl") pod "2637c57c-34ea-4a9a-860f-b8cc1b5adec4" (UID: "2637c57c-34ea-4a9a-860f-b8cc1b5adec4"). InnerVolumeSpecName "kube-api-access-jrngl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.697614 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrngl\" (UniqueName: \"kubernetes.io/projected/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-kube-api-access-jrngl\") on node \"crc\" DevicePath \"\"" Dec 15 08:30:03 crc kubenswrapper[4876]: I1215 08:30:03.697650 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2637c57c-34ea-4a9a-860f-b8cc1b5adec4-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.180670 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" event={"ID":"2637c57c-34ea-4a9a-860f-b8cc1b5adec4","Type":"ContainerDied","Data":"e3d5eb86029c5ca52a70b9510b6fe3c02055bb10c0816305927c69376ff49a45"} Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.180726 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3d5eb86029c5ca52a70b9510b6fe3c02055bb10c0816305927c69376ff49a45" Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.180730 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t" Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.238760 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq"] Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.243196 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429745-gp6xq"] Dec 15 08:30:04 crc kubenswrapper[4876]: I1215 08:30:04.714142 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26fa769a-15b5-43c0-ac55-8bae8cd62876" path="/var/lib/kubelet/pods/26fa769a-15b5-43c0-ac55-8bae8cd62876/volumes" Dec 15 08:30:11 crc kubenswrapper[4876]: I1215 08:30:11.705294 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:30:11 crc kubenswrapper[4876]: E1215 08:30:11.705823 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:30:22 crc kubenswrapper[4876]: I1215 08:30:22.447160 4876 scope.go:117] "RemoveContainer" containerID="3ee6972b27e1cf82f92a4324425ec4564dd6322f980fdc4da9af6dd2f57c03d6" Dec 15 08:30:22 crc kubenswrapper[4876]: I1215 08:30:22.706244 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:30:22 crc kubenswrapper[4876]: E1215 08:30:22.706808 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 
08:30:36 crc kubenswrapper[4876]: I1215 08:30:36.705060 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:30:37 crc kubenswrapper[4876]: I1215 08:30:37.411656 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd"} Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.508940 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:30:43 crc kubenswrapper[4876]: E1215 08:30:43.509971 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2637c57c-34ea-4a9a-860f-b8cc1b5adec4" containerName="collect-profiles" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.509985 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="2637c57c-34ea-4a9a-860f-b8cc1b5adec4" containerName="collect-profiles" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.510141 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="2637c57c-34ea-4a9a-860f-b8cc1b5adec4" containerName="collect-profiles" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.510842 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.514657 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.515738 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.515864 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-qnzzv" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.515888 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.520309 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.523343 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.570389 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l79nh\" (UniqueName: \"kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.570578 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.570756 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: 
\"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.671261 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l79nh\" (UniqueName: \"kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.671560 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.671731 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.672446 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.672837 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.696262 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l79nh\" (UniqueName: \"kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh\") pod \"dnsmasq-dns-6fdf89db6c-xttkz\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.821966 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.824788 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.827657 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.837210 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.876468 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.876749 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.876837 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwtxl\" (UniqueName: \"kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.977805 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.978155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwtxl\" (UniqueName: \"kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.978209 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.979063 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:43 crc kubenswrapper[4876]: I1215 08:30:43.979262 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.037050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwtxl\" (UniqueName: 
\"kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl\") pod \"dnsmasq-dns-57484c487-vwjxf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.151548 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.479657 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.635580 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.697456 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.698694 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.700834 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.701068 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.701410 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.702017 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.702228 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b7t85" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.786802 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.892905 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.892961 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893128 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893167 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893223 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893324 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-279xl\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893402 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893434 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.893478 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.992483 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.993641 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994580 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-279xl\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994661 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994680 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994712 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994760 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994780 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994805 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994831 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.994852 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.995952 4876 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.997459 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.997524 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.996228 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.996630 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 15 08:30:44 crc kubenswrapper[4876]: I1215 08:30:44.996744 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:44.999577 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.000358 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.002968 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.003001 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/68b45e4116eb64b4120692a1a5de30fd6206f1613d1f7b136554621a5204b308/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.004741 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.004807 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vkzsl" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.015954 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.016645 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.021299 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-279xl\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.046727 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.074139 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097453 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097539 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097585 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097615 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097733 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwfml\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097763 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097789 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097857 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.097916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199395 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199470 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199527 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199547 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199588 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199658 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwfml\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199677 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199697 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199757 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.199914 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.200268 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.201598 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.202144 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.205096 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.207630 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.208742 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.208773 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/46899d63f101f9fe6a4e0f21f7f210f92524f3b9278717f0edc1ca1fbedccb40/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.222991 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwfml\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.223295 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.252531 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: 
I1215 08:30:45.336622 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.391530 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.467063 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57484c487-vwjxf" event={"ID":"39086c72-7371-4c97-af08-cac754e741bf","Type":"ContainerStarted","Data":"a56ab57cc910e27221d56df7733574b4ffe2c4487ad9b77438a41e8e0679ac02"} Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.472247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" event={"ID":"60c3236a-de7c-405f-bc1b-905345d39b78","Type":"ContainerStarted","Data":"5ab3560dca025e3aa442e69ccd308954cdec83670f14fb6ca2841e84969890b5"} Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.748789 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:30:45 crc kubenswrapper[4876]: W1215 08:30:45.756348 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0aa962d_8969_416e_b841_8a1ad7c4b077.slice/crio-06b07fb2392fdb7e3cfd9fc37e4c9a8fbc34d845da61c391c00b17c86664da63 WatchSource:0}: Error finding container 06b07fb2392fdb7e3cfd9fc37e4c9a8fbc34d845da61c391c00b17c86664da63: Status 404 returned error can't find the container with id 06b07fb2392fdb7e3cfd9fc37e4c9a8fbc34d845da61c391c00b17c86664da63 Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.802446 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.884788 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.886342 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.889867 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.891588 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-w8npd" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.892096 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.892436 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.894633 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 08:30:45 crc kubenswrapper[4876]: I1215 08:30:45.904190 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013031 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013153 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-kolla-config\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013180 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f10dc481-4514-49d0-b836-88afb231ae45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f10dc481-4514-49d0-b836-88afb231ae45\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013234 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdrxf\" (UniqueName: \"kubernetes.io/projected/ebd41608-1877-4f89-8a43-1d902a246616-kube-api-access-pdrxf\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013274 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013323 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-config-data-default\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013344 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.013465 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ebd41608-1877-4f89-8a43-1d902a246616-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115250 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-config-data-default\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115306 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115336 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ebd41608-1877-4f89-8a43-1d902a246616-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115419 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115443 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-kolla-config\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115470 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f10dc481-4514-49d0-b836-88afb231ae45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f10dc481-4514-49d0-b836-88afb231ae45\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115516 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdrxf\" (UniqueName: \"kubernetes.io/projected/ebd41608-1877-4f89-8a43-1d902a246616-kube-api-access-pdrxf\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.115544 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.116210 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ebd41608-1877-4f89-8a43-1d902a246616-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.116368 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-config-data-default\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.116993 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-kolla-config\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.117258 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebd41608-1877-4f89-8a43-1d902a246616-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.120479 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.123248 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.123291 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f10dc481-4514-49d0-b836-88afb231ae45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f10dc481-4514-49d0-b836-88afb231ae45\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a28f917bbeb1742ad7db4d36d6924edd271c357d8977cab1183c2a49665965aa/globalmount\"" pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.138134 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdrxf\" (UniqueName: \"kubernetes.io/projected/ebd41608-1877-4f89-8a43-1d902a246616-kube-api-access-pdrxf\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.140343 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd41608-1877-4f89-8a43-1d902a246616-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.166583 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f10dc481-4514-49d0-b836-88afb231ae45\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f10dc481-4514-49d0-b836-88afb231ae45\") pod \"openstack-galera-0\" (UID: \"ebd41608-1877-4f89-8a43-1d902a246616\") " pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.214665 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.219844 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.220754 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.227431 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.230778 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-kh4dn" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.247287 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.318313 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28wqz\" (UniqueName: \"kubernetes.io/projected/b3b4579c-ee28-4f00-b7da-d8f775335c21-kube-api-access-28wqz\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.318496 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-kolla-config\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.318520 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-config-data\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.420575 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-kolla-config\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.420610 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-config-data\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.420675 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28wqz\" (UniqueName: \"kubernetes.io/projected/b3b4579c-ee28-4f00-b7da-d8f775335c21-kube-api-access-28wqz\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.422317 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-kolla-config\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.436614 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b3b4579c-ee28-4f00-b7da-d8f775335c21-config-data\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.440060 4876 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-28wqz\" (UniqueName: \"kubernetes.io/projected/b3b4579c-ee28-4f00-b7da-d8f775335c21-kube-api-access-28wqz\") pod \"memcached-0\" (UID: \"b3b4579c-ee28-4f00-b7da-d8f775335c21\") " pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.491675 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerStarted","Data":"6c20c0f9ace13fcba598c1ef3ee10d73982b71ce1f7905549bb900769257127e"} Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.494200 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerStarted","Data":"06b07fb2392fdb7e3cfd9fc37e4c9a8fbc34d845da61c391c00b17c86664da63"} Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.556523 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 15 08:30:46 crc kubenswrapper[4876]: I1215 08:30:46.952886 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.084768 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.413305 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.415986 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.418527 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.422835 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.425746 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lllr5" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.426258 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.439988 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.507692 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ebd41608-1877-4f89-8a43-1d902a246616","Type":"ContainerStarted","Data":"ed802d25112f8cd45604eeda33b0e1c711b7919ccb2013a6dcbd2e11477ec212"} Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.509066 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b3b4579c-ee28-4f00-b7da-d8f775335c21","Type":"ContainerStarted","Data":"f7958dd47ec41a2107dfd4b31d562316a1763f52d3745809b85f7c0bf2ca3f70"} Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.569885 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9de66337-70bb-40c0-8d62-530c8e681ead\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de66337-70bb-40c0-8d62-530c8e681ead\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " 
pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570235 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570497 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570581 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570659 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570740 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzg78\" (UniqueName: \"kubernetes.io/projected/e90da418-4119-4738-85b2-58075f0eac3a-kube-api-access-lzg78\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570835 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e90da418-4119-4738-85b2-58075f0eac3a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.570901 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672010 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672059 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-operator-scripts\") pod 
\"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672135 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzg78\" (UniqueName: \"kubernetes.io/projected/e90da418-4119-4738-85b2-58075f0eac3a-kube-api-access-lzg78\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672156 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e90da418-4119-4738-85b2-58075f0eac3a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672180 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672224 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9de66337-70bb-40c0-8d62-530c8e681ead\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de66337-70bb-40c0-8d62-530c8e681ead\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.672249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.674089 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e90da418-4119-4738-85b2-58075f0eac3a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.674331 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.674980 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.676309 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e90da418-4119-4738-85b2-58075f0eac3a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.678846 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.678884 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9de66337-70bb-40c0-8d62-530c8e681ead\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de66337-70bb-40c0-8d62-530c8e681ead\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3df0d70fc12311278331c056f127b04b2ee3c66c0bdd158eaddb37b91e8c76c7/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.678888 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.679593 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e90da418-4119-4738-85b2-58075f0eac3a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.702020 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzg78\" (UniqueName: \"kubernetes.io/projected/e90da418-4119-4738-85b2-58075f0eac3a-kube-api-access-lzg78\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.732067 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9de66337-70bb-40c0-8d62-530c8e681ead\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9de66337-70bb-40c0-8d62-530c8e681ead\") pod \"openstack-cell1-galera-0\" (UID: \"e90da418-4119-4738-85b2-58075f0eac3a\") " pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:47 crc kubenswrapper[4876]: I1215 08:30:47.754299 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 15 08:30:48 crc kubenswrapper[4876]: I1215 08:30:48.369534 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 15 08:30:48 crc kubenswrapper[4876]: I1215 08:30:48.522068 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e90da418-4119-4738-85b2-58075f0eac3a","Type":"ContainerStarted","Data":"1fb9070153b58ac8e29ebefef09c79247b3a0fa7904f85222a529e0c9903738f"} Dec 15 08:31:07 crc kubenswrapper[4876]: I1215 08:31:07.966513 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:07 crc kubenswrapper[4876]: I1215 08:31:07.968606 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:07 crc kubenswrapper[4876]: I1215 08:31:07.981429 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.140463 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.140576 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.140849 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmvxr\" (UniqueName: \"kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.242921 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.242978 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.243021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmvxr\" (UniqueName: \"kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 
08:31:08.243529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.243564 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.262903 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmvxr\" (UniqueName: \"kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr\") pod \"redhat-marketplace-5l7pl\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: I1215 08:31:08.329284 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:08 crc kubenswrapper[4876]: E1215 08:31:08.417122 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:c3a837a7c939c44c9106d2b2c7c72015" Dec 15 08:31:08 crc kubenswrapper[4876]: E1215 08:31:08.417195 4876 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:c3a837a7c939c44c9106d2b2c7c72015" Dec 15 08:31:08 crc kubenswrapper[4876]: E1215 08:31:08.417342 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:c3a837a7c939c44c9106d2b2c7c72015,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h564h676h699hcdh67bh66hfdh569h545h648h94h546h696h668h89h96h667h575h595h5d9h584h8dhbdh697h54bhb7h58fh5c9hd8h5cdh5c7q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dwtxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57484c487-vwjxf_openstack(39086c72-7371-4c97-af08-cac754e741bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 08:31:08 crc kubenswrapper[4876]: E1215 08:31:08.418567 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57484c487-vwjxf" podUID="39086c72-7371-4c97-af08-cac754e741bf" Dec 15 08:31:08 crc kubenswrapper[4876]: E1215 08:31:08.717870 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:c3a837a7c939c44c9106d2b2c7c72015\\\"\"" pod="openstack/dnsmasq-dns-57484c487-vwjxf" podUID="39086c72-7371-4c97-af08-cac754e741bf" Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.567472 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:09 crc kubenswrapper[4876]: W1215 08:31:09.579311 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd79cdda0_605e_4817_8dfb_cdfd0692d75f.slice/crio-b5862cd4ad4e31afca92e61401bd64620d900c79d3744fdb8f87c8ef49f23dce WatchSource:0}: Error finding container b5862cd4ad4e31afca92e61401bd64620d900c79d3744fdb8f87c8ef49f23dce: Status 404 returned error can't find the container with id b5862cd4ad4e31afca92e61401bd64620d900c79d3744fdb8f87c8ef49f23dce Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.722926 4876 generic.go:334] "Generic (PLEG): container finished" podID="60c3236a-de7c-405f-bc1b-905345d39b78" 
containerID="9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713" exitCode=0 Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.722998 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" event={"ID":"60c3236a-de7c-405f-bc1b-905345d39b78","Type":"ContainerDied","Data":"9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713"} Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.724240 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerStarted","Data":"b5862cd4ad4e31afca92e61401bd64620d900c79d3744fdb8f87c8ef49f23dce"} Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.726582 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ebd41608-1877-4f89-8a43-1d902a246616","Type":"ContainerStarted","Data":"265927ccb485a1a13462d5db3ba96c2e0885aad44fbdd944878a4ad2b79c1aa3"} Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.728379 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e90da418-4119-4738-85b2-58075f0eac3a","Type":"ContainerStarted","Data":"7be277aee79a6246e295530e465501927cc08f093632a048294f6a221edf4c8e"} Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.734825 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"b3b4579c-ee28-4f00-b7da-d8f775335c21","Type":"ContainerStarted","Data":"63d6c35417ccc2c2024f14b1e512265bedbec58f70288accf2af85111261c8b0"} Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.735621 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 15 08:31:09 crc kubenswrapper[4876]: I1215 08:31:09.789293 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.7897883700000001 podStartE2EDuration="23.789274136s" podCreationTimestamp="2025-12-15 08:30:46 +0000 UTC" firstStartedPulling="2025-12-15 08:30:47.110604235 +0000 UTC m=+5972.681747146" lastFinishedPulling="2025-12-15 08:31:09.110090001 +0000 UTC m=+5994.681232912" observedRunningTime="2025-12-15 08:31:09.780590222 +0000 UTC m=+5995.351733143" watchObservedRunningTime="2025-12-15 08:31:09.789274136 +0000 UTC m=+5995.360417037" Dec 15 08:31:10 crc kubenswrapper[4876]: E1215 08:31:10.390603 4876 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 15 08:31:10 crc kubenswrapper[4876]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/60c3236a-de7c-405f-bc1b-905345d39b78/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 15 08:31:10 crc kubenswrapper[4876]: > podSandboxID="5ab3560dca025e3aa442e69ccd308954cdec83670f14fb6ca2841e84969890b5" Dec 15 08:31:10 crc kubenswrapper[4876]: E1215 08:31:10.391069 4876 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 15 08:31:10 crc kubenswrapper[4876]: container &Container{Name:dnsmasq-dns,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:c3a837a7c939c44c9106d2b2c7c72015,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8chc6h5bh56fh546hb7hc8h67h5bchffh577h697h5b5h5bdh59bhf6hf4h558hb5h578h595h5cchfbh644h59ch7fh654h547h587h5cbh5d5h8fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l79nh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6fdf89db6c-xttkz_openstack(60c3236a-de7c-405f-bc1b-905345d39b78): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/60c3236a-de7c-405f-bc1b-905345d39b78/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 15 08:31:10 crc kubenswrapper[4876]: > logger="UnhandledError" Dec 15 08:31:10 crc kubenswrapper[4876]: E1215 08:31:10.393216 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/60c3236a-de7c-405f-bc1b-905345d39b78/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" Dec 15 08:31:10 crc kubenswrapper[4876]: I1215 08:31:10.743585 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerStarted","Data":"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84"} Dec 15 08:31:10 crc kubenswrapper[4876]: I1215 08:31:10.747419 4876 generic.go:334] "Generic (PLEG): container finished" podID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" 
containerID="a419b95e5731d717718ba9c62a3798c78e1206eed81ed7e3a1b9b17e21dd6e26" exitCode=0 Dec 15 08:31:10 crc kubenswrapper[4876]: I1215 08:31:10.747513 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerDied","Data":"a419b95e5731d717718ba9c62a3798c78e1206eed81ed7e3a1b9b17e21dd6e26"} Dec 15 08:31:10 crc kubenswrapper[4876]: I1215 08:31:10.748965 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerStarted","Data":"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b"} Dec 15 08:31:10 crc kubenswrapper[4876]: I1215 08:31:10.750293 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:31:11 crc kubenswrapper[4876]: I1215 08:31:11.762225 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" event={"ID":"60c3236a-de7c-405f-bc1b-905345d39b78","Type":"ContainerStarted","Data":"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115"} Dec 15 08:31:11 crc kubenswrapper[4876]: I1215 08:31:11.762886 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:31:11 crc kubenswrapper[4876]: I1215 08:31:11.784650 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" podStartSLOduration=4.124906983 podStartE2EDuration="28.784630783s" podCreationTimestamp="2025-12-15 08:30:43 +0000 UTC" firstStartedPulling="2025-12-15 08:30:44.48997763 +0000 UTC m=+5970.061120541" lastFinishedPulling="2025-12-15 08:31:09.14970143 +0000 UTC m=+5994.720844341" observedRunningTime="2025-12-15 08:31:11.778872328 +0000 UTC m=+5997.350015259" watchObservedRunningTime="2025-12-15 08:31:11.784630783 +0000 UTC m=+5997.355773694" Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.770638 4876 generic.go:334] "Generic (PLEG): container finished" podID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerID="9951c102be15b755b7e522cdbbcce3b387cce730244345ae95bc795870e484ad" exitCode=0 Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.770745 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerDied","Data":"9951c102be15b755b7e522cdbbcce3b387cce730244345ae95bc795870e484ad"} Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.773026 4876 generic.go:334] "Generic (PLEG): container finished" podID="ebd41608-1877-4f89-8a43-1d902a246616" containerID="265927ccb485a1a13462d5db3ba96c2e0885aad44fbdd944878a4ad2b79c1aa3" exitCode=0 Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.773081 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ebd41608-1877-4f89-8a43-1d902a246616","Type":"ContainerDied","Data":"265927ccb485a1a13462d5db3ba96c2e0885aad44fbdd944878a4ad2b79c1aa3"} Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.774923 4876 generic.go:334] "Generic (PLEG): container finished" podID="e90da418-4119-4738-85b2-58075f0eac3a" containerID="7be277aee79a6246e295530e465501927cc08f093632a048294f6a221edf4c8e" exitCode=0 Dec 15 08:31:12 crc kubenswrapper[4876]: I1215 08:31:12.775019 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"e90da418-4119-4738-85b2-58075f0eac3a","Type":"ContainerDied","Data":"7be277aee79a6246e295530e465501927cc08f093632a048294f6a221edf4c8e"} Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.784335 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerStarted","Data":"e60522af907ae82c44a1c21c4eef3b1745b72a70e6694a09dc228796cf8246bd"} Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.786506 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ebd41608-1877-4f89-8a43-1d902a246616","Type":"ContainerStarted","Data":"279ddc5dd8a57041d47a85b58f462d480755d7afa6d0f4a3729d53459cf6f0cd"} Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.788694 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e90da418-4119-4738-85b2-58075f0eac3a","Type":"ContainerStarted","Data":"87cf636d196e9eb01a0b11cdabe5f98f648a26760e26e18f05b79d97a4e75b6c"} Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.807683 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5l7pl" podStartSLOduration=4.352267995 podStartE2EDuration="6.807660938s" podCreationTimestamp="2025-12-15 08:31:07 +0000 UTC" firstStartedPulling="2025-12-15 08:31:10.750073409 +0000 UTC m=+5996.321216320" lastFinishedPulling="2025-12-15 08:31:13.205466352 +0000 UTC m=+5998.776609263" observedRunningTime="2025-12-15 08:31:13.801780709 +0000 UTC m=+5999.372923640" watchObservedRunningTime="2025-12-15 08:31:13.807660938 +0000 UTC m=+5999.378803849" Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.851008 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.686507808 podStartE2EDuration="29.850989249s" podCreationTimestamp="2025-12-15 08:30:44 +0000 UTC" firstStartedPulling="2025-12-15 08:30:46.985216189 +0000 UTC m=+5972.556359100" lastFinishedPulling="2025-12-15 08:31:09.14969762 +0000 UTC m=+5994.720840541" observedRunningTime="2025-12-15 08:31:13.850517085 +0000 UTC m=+5999.421659996" watchObservedRunningTime="2025-12-15 08:31:13.850989249 +0000 UTC m=+5999.422132160" Dec 15 08:31:13 crc kubenswrapper[4876]: I1215 08:31:13.854700 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.142232917 podStartE2EDuration="27.854682558s" podCreationTimestamp="2025-12-15 08:30:46 +0000 UTC" firstStartedPulling="2025-12-15 08:30:48.401646238 +0000 UTC m=+5973.972789149" lastFinishedPulling="2025-12-15 08:31:09.114095859 +0000 UTC m=+5994.685238790" observedRunningTime="2025-12-15 08:31:13.834515333 +0000 UTC m=+5999.405658254" watchObservedRunningTime="2025-12-15 08:31:13.854682558 +0000 UTC m=+5999.425825469" Dec 15 08:31:16 crc kubenswrapper[4876]: I1215 08:31:16.216256 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 15 08:31:16 crc kubenswrapper[4876]: I1215 08:31:16.216580 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 15 08:31:16 crc kubenswrapper[4876]: I1215 08:31:16.558388 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 15 08:31:17 crc kubenswrapper[4876]: I1215 08:31:17.755240 4876 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 15 08:31:17 crc kubenswrapper[4876]: I1215 08:31:17.755592 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.329402 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.329447 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.369256 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.829304 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.879862 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:18 crc kubenswrapper[4876]: I1215 08:31:18.951837 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:19 crc kubenswrapper[4876]: I1215 08:31:19.812746 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 15 08:31:19 crc kubenswrapper[4876]: I1215 08:31:19.899612 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 15 08:31:20 crc kubenswrapper[4876]: I1215 08:31:20.299446 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 15 08:31:20 crc kubenswrapper[4876]: I1215 08:31:20.360538 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 15 08:31:20 crc kubenswrapper[4876]: I1215 08:31:20.836524 4876 generic.go:334] "Generic (PLEG): container finished" podID="39086c72-7371-4c97-af08-cac754e741bf" containerID="e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa" exitCode=0 Dec 15 08:31:20 crc kubenswrapper[4876]: I1215 08:31:20.836601 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57484c487-vwjxf" event={"ID":"39086c72-7371-4c97-af08-cac754e741bf","Type":"ContainerDied","Data":"e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa"} Dec 15 08:31:20 crc kubenswrapper[4876]: I1215 08:31:20.837023 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5l7pl" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="registry-server" containerID="cri-o://e60522af907ae82c44a1c21c4eef3b1745b72a70e6694a09dc228796cf8246bd" gracePeriod=2 Dec 15 08:31:21 crc kubenswrapper[4876]: I1215 08:31:21.845602 4876 generic.go:334] "Generic (PLEG): container finished" podID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerID="e60522af907ae82c44a1c21c4eef3b1745b72a70e6694a09dc228796cf8246bd" exitCode=0 Dec 15 08:31:21 crc kubenswrapper[4876]: I1215 08:31:21.846346 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" 
event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerDied","Data":"e60522af907ae82c44a1c21c4eef3b1745b72a70e6694a09dc228796cf8246bd"} Dec 15 08:31:24 crc kubenswrapper[4876]: I1215 08:31:24.865801 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57484c487-vwjxf" event={"ID":"39086c72-7371-4c97-af08-cac754e741bf","Type":"ContainerStarted","Data":"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff"} Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.493934 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.609865 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmvxr\" (UniqueName: \"kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr\") pod \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.609995 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content\") pod \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.610082 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities\") pod \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\" (UID: \"d79cdda0-605e-4817-8dfb-cdfd0692d75f\") " Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.611040 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities" (OuterVolumeSpecName: "utilities") pod "d79cdda0-605e-4817-8dfb-cdfd0692d75f" (UID: "d79cdda0-605e-4817-8dfb-cdfd0692d75f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.618366 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr" (OuterVolumeSpecName: "kube-api-access-zmvxr") pod "d79cdda0-605e-4817-8dfb-cdfd0692d75f" (UID: "d79cdda0-605e-4817-8dfb-cdfd0692d75f"). InnerVolumeSpecName "kube-api-access-zmvxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.642694 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d79cdda0-605e-4817-8dfb-cdfd0692d75f" (UID: "d79cdda0-605e-4817-8dfb-cdfd0692d75f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.711412 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmvxr\" (UniqueName: \"kubernetes.io/projected/d79cdda0-605e-4817-8dfb-cdfd0692d75f-kube-api-access-zmvxr\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.711444 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.711455 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d79cdda0-605e-4817-8dfb-cdfd0692d75f-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.878389 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5l7pl" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.878435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5l7pl" event={"ID":"d79cdda0-605e-4817-8dfb-cdfd0692d75f","Type":"ContainerDied","Data":"b5862cd4ad4e31afca92e61401bd64620d900c79d3744fdb8f87c8ef49f23dce"} Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.878485 4876 scope.go:117] "RemoveContainer" containerID="e60522af907ae82c44a1c21c4eef3b1745b72a70e6694a09dc228796cf8246bd" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.878583 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.900698 4876 scope.go:117] "RemoveContainer" containerID="9951c102be15b755b7e522cdbbcce3b387cce730244345ae95bc795870e484ad" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.906511 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57484c487-vwjxf" podStartSLOduration=-9223371993.948284 podStartE2EDuration="42.906491043s" podCreationTimestamp="2025-12-15 08:30:43 +0000 UTC" firstStartedPulling="2025-12-15 08:30:44.638901392 +0000 UTC m=+5970.210044303" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:31:25.896970246 +0000 UTC m=+6011.468113167" watchObservedRunningTime="2025-12-15 08:31:25.906491043 +0000 UTC m=+6011.477633954" Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.919449 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.929345 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5l7pl"] Dec 15 08:31:25 crc kubenswrapper[4876]: I1215 08:31:25.956998 4876 scope.go:117] "RemoveContainer" containerID="a419b95e5731d717718ba9c62a3798c78e1206eed81ed7e3a1b9b17e21dd6e26" Dec 15 08:31:26 crc kubenswrapper[4876]: I1215 08:31:26.713941 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" path="/var/lib/kubelet/pods/d79cdda0-605e-4817-8dfb-cdfd0692d75f/volumes" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.155320 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.215239 4876 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.215851 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="dnsmasq-dns" containerID="cri-o://c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115" gracePeriod=10 Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.599413 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.687906 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l79nh\" (UniqueName: \"kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh\") pod \"60c3236a-de7c-405f-bc1b-905345d39b78\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.688304 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc\") pod \"60c3236a-de7c-405f-bc1b-905345d39b78\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.688420 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config\") pod \"60c3236a-de7c-405f-bc1b-905345d39b78\" (UID: \"60c3236a-de7c-405f-bc1b-905345d39b78\") " Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.697090 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh" (OuterVolumeSpecName: "kube-api-access-l79nh") pod "60c3236a-de7c-405f-bc1b-905345d39b78" (UID: "60c3236a-de7c-405f-bc1b-905345d39b78"). InnerVolumeSpecName "kube-api-access-l79nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.725911 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "60c3236a-de7c-405f-bc1b-905345d39b78" (UID: "60c3236a-de7c-405f-bc1b-905345d39b78"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.726018 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config" (OuterVolumeSpecName: "config") pod "60c3236a-de7c-405f-bc1b-905345d39b78" (UID: "60c3236a-de7c-405f-bc1b-905345d39b78"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.790714 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.790749 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l79nh\" (UniqueName: \"kubernetes.io/projected/60c3236a-de7c-405f-bc1b-905345d39b78-kube-api-access-l79nh\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.790760 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/60c3236a-de7c-405f-bc1b-905345d39b78-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.907316 4876 generic.go:334] "Generic (PLEG): container finished" podID="60c3236a-de7c-405f-bc1b-905345d39b78" containerID="c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115" exitCode=0 Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.907368 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" event={"ID":"60c3236a-de7c-405f-bc1b-905345d39b78","Type":"ContainerDied","Data":"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115"} Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.907398 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.907419 4876 scope.go:117] "RemoveContainer" containerID="c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.907406 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fdf89db6c-xttkz" event={"ID":"60c3236a-de7c-405f-bc1b-905345d39b78","Type":"ContainerDied","Data":"5ab3560dca025e3aa442e69ccd308954cdec83670f14fb6ca2841e84969890b5"} Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.938714 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.959311 4876 scope.go:117] "RemoveContainer" containerID="9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.973897 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fdf89db6c-xttkz"] Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.990650 4876 scope.go:117] "RemoveContainer" containerID="c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115" Dec 15 08:31:29 crc kubenswrapper[4876]: E1215 08:31:29.993226 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115\": container with ID starting with c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115 not found: ID does not exist" containerID="c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.993259 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115"} err="failed to get container status 
\"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115\": rpc error: code = NotFound desc = could not find container \"c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115\": container with ID starting with c7175ee27b91d065b8df8482cbf7a775e1d5351661e7f8711f63e7ddcdcaf115 not found: ID does not exist" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.993279 4876 scope.go:117] "RemoveContainer" containerID="9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713" Dec 15 08:31:29 crc kubenswrapper[4876]: E1215 08:31:29.997200 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713\": container with ID starting with 9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713 not found: ID does not exist" containerID="9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713" Dec 15 08:31:29 crc kubenswrapper[4876]: I1215 08:31:29.997232 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713"} err="failed to get container status \"9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713\": rpc error: code = NotFound desc = could not find container \"9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713\": container with ID starting with 9d469162be895a886bd6b95e7b7aba3704743ac4860eb56f22dca976bd396713 not found: ID does not exist" Dec 15 08:31:30 crc kubenswrapper[4876]: I1215 08:31:30.713767 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" path="/var/lib/kubelet/pods/60c3236a-de7c-405f-bc1b-905345d39b78/volumes" Dec 15 08:31:42 crc kubenswrapper[4876]: I1215 08:31:42.996171 4876 generic.go:334] "Generic (PLEG): container finished" podID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerID="2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b" exitCode=0 Dec 15 08:31:42 crc kubenswrapper[4876]: I1215 08:31:42.996242 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerDied","Data":"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b"} Dec 15 08:31:42 crc kubenswrapper[4876]: I1215 08:31:42.998377 4876 generic.go:334] "Generic (PLEG): container finished" podID="566357c8-1527-4312-a95f-10ede2e5a00d" containerID="47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84" exitCode=0 Dec 15 08:31:42 crc kubenswrapper[4876]: I1215 08:31:42.998409 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerDied","Data":"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84"} Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.007341 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerStarted","Data":"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3"} Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.007806 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.009294 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerStarted","Data":"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f"} Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.009894 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.058003 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.760416801 podStartE2EDuration="1m1.057980488s" podCreationTimestamp="2025-12-15 08:30:43 +0000 UTC" firstStartedPulling="2025-12-15 08:30:45.819839411 +0000 UTC m=+5971.390982322" lastFinishedPulling="2025-12-15 08:31:09.117403098 +0000 UTC m=+5994.688546009" observedRunningTime="2025-12-15 08:31:44.033646481 +0000 UTC m=+6029.604789422" watchObservedRunningTime="2025-12-15 08:31:44.057980488 +0000 UTC m=+6029.629123419" Dec 15 08:31:44 crc kubenswrapper[4876]: I1215 08:31:44.059195 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.712016694 podStartE2EDuration="1m1.059188391s" podCreationTimestamp="2025-12-15 08:30:43 +0000 UTC" firstStartedPulling="2025-12-15 08:30:45.758144394 +0000 UTC m=+5971.329287295" lastFinishedPulling="2025-12-15 08:31:09.105316081 +0000 UTC m=+5994.676458992" observedRunningTime="2025-12-15 08:31:44.055125461 +0000 UTC m=+6029.626268392" watchObservedRunningTime="2025-12-15 08:31:44.059188391 +0000 UTC m=+6029.630331292" Dec 15 08:31:55 crc kubenswrapper[4876]: I1215 08:31:55.340378 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 15 08:31:55 crc kubenswrapper[4876]: I1215 08:31:55.397358 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.960562 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:32:00 crc kubenswrapper[4876]: E1215 08:32:00.961494 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="registry-server" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961516 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="registry-server" Dec 15 08:32:00 crc kubenswrapper[4876]: E1215 08:32:00.961529 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="init" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961535 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="init" Dec 15 08:32:00 crc kubenswrapper[4876]: E1215 08:32:00.961556 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="extract-content" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961563 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="extract-content" Dec 15 08:32:00 crc kubenswrapper[4876]: E1215 08:32:00.961577 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="extract-utilities" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961583 
4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="extract-utilities" Dec 15 08:32:00 crc kubenswrapper[4876]: E1215 08:32:00.961595 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="dnsmasq-dns" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961600 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="dnsmasq-dns" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961851 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="60c3236a-de7c-405f-bc1b-905345d39b78" containerName="dnsmasq-dns" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.961879 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d79cdda0-605e-4817-8dfb-cdfd0692d75f" containerName="registry-server" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.962765 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:00 crc kubenswrapper[4876]: I1215 08:32:00.971634 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.078931 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.079003 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.079029 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9m8v\" (UniqueName: \"kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.180898 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.180982 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.181008 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9m8v\" (UniqueName: \"kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: 
\"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.182021 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.182046 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.204470 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9m8v\" (UniqueName: \"kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v\") pod \"dnsmasq-dns-55db7cd99c-lvnxm\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.284097 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.608615 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:01 crc kubenswrapper[4876]: I1215 08:32:01.751039 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:32:01 crc kubenswrapper[4876]: W1215 08:32:01.754718 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42352742_25a2_42ee_b8b0_7e2e8b074ec2.slice/crio-50974e266a4d87c8cb2be3bd5c6c78b42aea8a1726081b4b4c880eaab9dc467c WatchSource:0}: Error finding container 50974e266a4d87c8cb2be3bd5c6c78b42aea8a1726081b4b4c880eaab9dc467c: Status 404 returned error can't find the container with id 50974e266a4d87c8cb2be3bd5c6c78b42aea8a1726081b4b4c880eaab9dc467c Dec 15 08:32:02 crc kubenswrapper[4876]: I1215 08:32:02.135471 4876 generic.go:334] "Generic (PLEG): container finished" podID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerID="09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8" exitCode=0 Dec 15 08:32:02 crc kubenswrapper[4876]: I1215 08:32:02.135575 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" event={"ID":"42352742-25a2-42ee-b8b0-7e2e8b074ec2","Type":"ContainerDied","Data":"09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8"} Dec 15 08:32:02 crc kubenswrapper[4876]: I1215 08:32:02.135871 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" event={"ID":"42352742-25a2-42ee-b8b0-7e2e8b074ec2","Type":"ContainerStarted","Data":"50974e266a4d87c8cb2be3bd5c6c78b42aea8a1726081b4b4c880eaab9dc467c"} Dec 15 08:32:02 crc kubenswrapper[4876]: I1215 08:32:02.395553 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:03 crc kubenswrapper[4876]: I1215 08:32:03.143253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" 
event={"ID":"42352742-25a2-42ee-b8b0-7e2e8b074ec2","Type":"ContainerStarted","Data":"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a"} Dec 15 08:32:03 crc kubenswrapper[4876]: I1215 08:32:03.143401 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:03 crc kubenswrapper[4876]: I1215 08:32:03.174295 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" podStartSLOduration=3.174246223 podStartE2EDuration="3.174246223s" podCreationTimestamp="2025-12-15 08:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:32:03.158747024 +0000 UTC m=+6048.729889945" watchObservedRunningTime="2025-12-15 08:32:03.174246223 +0000 UTC m=+6048.745389134" Dec 15 08:32:03 crc kubenswrapper[4876]: I1215 08:32:03.376040 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="rabbitmq" containerID="cri-o://738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3" gracePeriod=604799 Dec 15 08:32:04 crc kubenswrapper[4876]: I1215 08:32:04.061819 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="rabbitmq" containerID="cri-o://aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f" gracePeriod=604799 Dec 15 08:32:05 crc kubenswrapper[4876]: I1215 08:32:05.338021 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.245:5672: connect: connection refused" Dec 15 08:32:05 crc kubenswrapper[4876]: I1215 08:32:05.392612 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.246:5672: connect: connection refused" Dec 15 08:32:09 crc kubenswrapper[4876]: I1215 08:32:09.896663 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009270 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009471 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009501 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009527 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009551 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009590 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279xl\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009641 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009681 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009704 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf\") pod \"566357c8-1527-4312-a95f-10ede2e5a00d\" (UID: \"566357c8-1527-4312-a95f-10ede2e5a00d\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.009931 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod 
"566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.010930 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.011258 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.018780 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.023337 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info" (OuterVolumeSpecName: "pod-info") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.024470 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl" (OuterVolumeSpecName: "kube-api-access-279xl") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "kube-api-access-279xl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.026995 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242" (OuterVolumeSpecName: "persistence") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "pvc-2fd950b7-e39d-4574-83d9-cb82a7284242". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.048661 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf" (OuterVolumeSpecName: "server-conf") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.087020 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "566357c8-1527-4312-a95f-10ede2e5a00d" (UID: "566357c8-1527-4312-a95f-10ede2e5a00d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.112949 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.112979 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.112990 4876 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113000 4876 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/566357c8-1527-4312-a95f-10ede2e5a00d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113041 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") on node \"crc\" " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113055 4876 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/566357c8-1527-4312-a95f-10ede2e5a00d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113069 4876 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/566357c8-1527-4312-a95f-10ede2e5a00d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113081 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/566357c8-1527-4312-a95f-10ede2e5a00d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.113093 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279xl\" (UniqueName: \"kubernetes.io/projected/566357c8-1527-4312-a95f-10ede2e5a00d-kube-api-access-279xl\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.128353 4876 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.128482 4876 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-2fd950b7-e39d-4574-83d9-cb82a7284242" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242") on node "crc" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.197256 4876 generic.go:334] "Generic (PLEG): container finished" podID="566357c8-1527-4312-a95f-10ede2e5a00d" containerID="738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3" exitCode=0 Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.197399 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerDied","Data":"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3"} Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.197549 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"566357c8-1527-4312-a95f-10ede2e5a00d","Type":"ContainerDied","Data":"6c20c0f9ace13fcba598c1ef3ee10d73982b71ce1f7905549bb900769257127e"} Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.197642 4876 scope.go:117] "RemoveContainer" containerID="738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.197813 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.214160 4876 reconciler_common.go:293] "Volume detached for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.259861 4876 scope.go:117] "RemoveContainer" containerID="47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.267556 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.273305 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.291799 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:10 crc kubenswrapper[4876]: E1215 08:32:10.292097 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="rabbitmq" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.292143 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="rabbitmq" Dec 15 08:32:10 crc kubenswrapper[4876]: E1215 08:32:10.292172 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="setup-container" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.292178 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="setup-container" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.292321 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" containerName="rabbitmq" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.293007 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.303853 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.303939 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-b7t85" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.304065 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.303856 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.304178 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.314400 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.386594 4876 scope.go:117] "RemoveContainer" containerID="738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3" Dec 15 08:32:10 crc kubenswrapper[4876]: E1215 08:32:10.387382 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3\": container with ID starting with 738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3 not found: ID does not exist" containerID="738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.387419 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3"} err="failed to get container status \"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3\": rpc error: code = NotFound desc = could not find container \"738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3\": container with ID starting with 738e9702f082aa818257bdbf7a0f2aaaed5678c6b52d9f39eb759cebc445dcc3 not found: ID does not exist" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.387458 4876 scope.go:117] "RemoveContainer" containerID="47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84" Dec 15 08:32:10 crc kubenswrapper[4876]: E1215 08:32:10.387951 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84\": container with ID starting with 47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84 not found: ID does not exist" containerID="47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.387986 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84"} err="failed to get container status \"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84\": rpc error: code = NotFound desc = could not find container \"47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84\": container with ID starting with 47e1ae6fbde09cd25cbc9e22a7166b4c61b729eb34f2de582f9d2c14e4902d84 not found: ID does not exist" Dec 15 08:32:10 crc 
kubenswrapper[4876]: I1215 08:32:10.416023 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8pnm\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-kube-api-access-k8pnm\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416077 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416274 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416304 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416332 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416364 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416405 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416442 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.416485 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc 
kubenswrapper[4876]: I1215 08:32:10.518061 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518225 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518401 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8pnm\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-kube-api-access-k8pnm\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518460 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518493 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518707 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518731 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518759 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.518806 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.519248 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.519570 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.519818 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.520310 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.521744 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.521771 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/68b45e4116eb64b4120692a1a5de30fd6206f1613d1f7b136554621a5204b308/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.523139 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.523639 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.524884 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.541994 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8pnm\" (UniqueName: \"kubernetes.io/projected/4d08f1c6-5e22-447b-8ba4-0205b37e47eb-kube-api-access-k8pnm\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 
08:32:10.544479 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2fd950b7-e39d-4574-83d9-cb82a7284242\") pod \"rabbitmq-server-0\" (UID: \"4d08f1c6-5e22-447b-8ba4-0205b37e47eb\") " pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.570844 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.622412 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.716538 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="566357c8-1527-4312-a95f-10ede2e5a00d" path="/var/lib/kubelet/pods/566357c8-1527-4312-a95f-10ede2e5a00d/volumes" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723685 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwfml\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723738 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723771 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723799 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723899 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723926 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.723949 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.724067 4876 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.724093 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret\") pod \"d0aa962d-8969-416e-b841-8a1ad7c4b077\" (UID: \"d0aa962d-8969-416e-b841-8a1ad7c4b077\") " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.725254 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.725263 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.725353 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.732005 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml" (OuterVolumeSpecName: "kube-api-access-nwfml") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "kube-api-access-nwfml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.738173 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info" (OuterVolumeSpecName: "pod-info") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.740317 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.746222 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4" (OuterVolumeSpecName: "persistence") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.759639 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf" (OuterVolumeSpecName: "server-conf") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.817900 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d0aa962d-8969-416e-b841-8a1ad7c4b077" (UID: "d0aa962d-8969-416e-b841-8a1ad7c4b077"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825200 4876 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825234 4876 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d0aa962d-8969-416e-b841-8a1ad7c4b077-pod-info\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825243 4876 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d0aa962d-8969-416e-b841-8a1ad7c4b077-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825265 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") on node \"crc\" " Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825277 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwfml\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-kube-api-access-nwfml\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825286 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825297 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825305 4876 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/d0aa962d-8969-416e-b841-8a1ad7c4b077-server-conf\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.825313 4876 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d0aa962d-8969-416e-b841-8a1ad7c4b077-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.839058 4876 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.839282 4876 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4") on node "crc" Dec 15 08:32:10 crc kubenswrapper[4876]: I1215 08:32:10.926905 4876 reconciler_common.go:293] "Volume detached for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.120430 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.205773 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d08f1c6-5e22-447b-8ba4-0205b37e47eb","Type":"ContainerStarted","Data":"494f8822b537bbe138fa81e618d3166f75c0333b94ea2e464a44e37e5a41bbc2"} Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.208916 4876 generic.go:334] "Generic (PLEG): container finished" podID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerID="aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f" exitCode=0 Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.208950 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerDied","Data":"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f"} Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.208969 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"d0aa962d-8969-416e-b841-8a1ad7c4b077","Type":"ContainerDied","Data":"06b07fb2392fdb7e3cfd9fc37e4c9a8fbc34d845da61c391c00b17c86664da63"} Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.208984 4876 scope.go:117] "RemoveContainer" containerID="aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.209071 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.231774 4876 scope.go:117] "RemoveContainer" containerID="2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.258758 4876 scope.go:117] "RemoveContainer" containerID="aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f" Dec 15 08:32:11 crc kubenswrapper[4876]: E1215 08:32:11.262781 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f\": container with ID starting with aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f not found: ID does not exist" containerID="aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.262838 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f"} err="failed to get container status \"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f\": rpc error: code = NotFound desc = could not find container \"aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f\": container with ID starting with aaac2a7f243df5a7a0c0a09a06810bdc2a8f84405c5e037b5d98c90b6276073f not found: ID does not exist" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.262873 4876 scope.go:117] "RemoveContainer" containerID="2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.263087 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:11 crc kubenswrapper[4876]: E1215 08:32:11.263882 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b\": container with ID starting with 2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b not found: ID does not exist" containerID="2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.263905 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b"} err="failed to get container status \"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b\": rpc error: code = NotFound desc = could not find container \"2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b\": container with ID starting with 2302e21526c7a682f8d3d760f1df777bf378726c877f4434e18f3c5653ef9e4b not found: ID does not exist" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.277494 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.297197 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:11 crc kubenswrapper[4876]: E1215 08:32:11.297862 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="rabbitmq" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.297883 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" 
containerName="rabbitmq" Dec 15 08:32:11 crc kubenswrapper[4876]: E1215 08:32:11.297930 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="setup-container" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.297943 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="setup-container" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.298382 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" containerName="rabbitmq" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.302999 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.303603 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.307521 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.307826 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-vkzsl" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.307907 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.308050 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.307916 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.332313 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.378433 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.378688 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57484c487-vwjxf" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="dnsmasq-dns" containerID="cri-o://1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff" gracePeriod=10 Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433566 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0cb0b72-61fd-4427-ac5d-c91e21cac028-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433628 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433659 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433714 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433829 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433897 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0cb0b72-61fd-4427-ac5d-c91e21cac028-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433915 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.433957 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnqcd\" (UniqueName: \"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-kube-api-access-dnqcd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.434056 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535330 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535381 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535412 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535430 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0cb0b72-61fd-4427-ac5d-c91e21cac028-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535453 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnqcd\" (UniqueName: \"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-kube-api-access-dnqcd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535490 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535511 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0cb0b72-61fd-4427-ac5d-c91e21cac028-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535542 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.535564 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.536050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.536697 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.536953 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.538223 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c0cb0b72-61fd-4427-ac5d-c91e21cac028-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.539559 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.539593 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/46899d63f101f9fe6a4e0f21f7f210f92524f3b9278717f0edc1ca1fbedccb40/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.539941 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.540064 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c0cb0b72-61fd-4427-ac5d-c91e21cac028-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.540879 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c0cb0b72-61fd-4427-ac5d-c91e21cac028-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.563940 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnqcd\" (UniqueName: \"kubernetes.io/projected/c0cb0b72-61fd-4427-ac5d-c91e21cac028-kube-api-access-dnqcd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.574907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-10fc4bf5-c6aa-4acf-bbae-0aa9972db9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"c0cb0b72-61fd-4427-ac5d-c91e21cac028\") " pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.635554 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.758953 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.839732 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc\") pod \"39086c72-7371-4c97-af08-cac754e741bf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.839882 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwtxl\" (UniqueName: \"kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl\") pod \"39086c72-7371-4c97-af08-cac754e741bf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.839906 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config\") pod \"39086c72-7371-4c97-af08-cac754e741bf\" (UID: \"39086c72-7371-4c97-af08-cac754e741bf\") " Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.846651 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl" (OuterVolumeSpecName: "kube-api-access-dwtxl") pod "39086c72-7371-4c97-af08-cac754e741bf" (UID: "39086c72-7371-4c97-af08-cac754e741bf"). InnerVolumeSpecName "kube-api-access-dwtxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.875954 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config" (OuterVolumeSpecName: "config") pod "39086c72-7371-4c97-af08-cac754e741bf" (UID: "39086c72-7371-4c97-af08-cac754e741bf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.941002 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwtxl\" (UniqueName: \"kubernetes.io/projected/39086c72-7371-4c97-af08-cac754e741bf-kube-api-access-dwtxl\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:11 crc kubenswrapper[4876]: I1215 08:32:11.941033 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.004254 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "39086c72-7371-4c97-af08-cac754e741bf" (UID: "39086c72-7371-4c97-af08-cac754e741bf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.042211 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39086c72-7371-4c97-af08-cac754e741bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.067187 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.218504 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0cb0b72-61fd-4427-ac5d-c91e21cac028","Type":"ContainerStarted","Data":"42d79ead50e850b1ac073aa411312fd3ef87588ad3ea3b5e9b80a915d18e0e6d"} Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.220888 4876 generic.go:334] "Generic (PLEG): container finished" podID="39086c72-7371-4c97-af08-cac754e741bf" containerID="1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff" exitCode=0 Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.220955 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57484c487-vwjxf" event={"ID":"39086c72-7371-4c97-af08-cac754e741bf","Type":"ContainerDied","Data":"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff"} Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.220980 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57484c487-vwjxf" event={"ID":"39086c72-7371-4c97-af08-cac754e741bf","Type":"ContainerDied","Data":"a56ab57cc910e27221d56df7733574b4ffe2c4487ad9b77438a41e8e0679ac02"} Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.221001 4876 scope.go:117] "RemoveContainer" containerID="1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.221194 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57484c487-vwjxf" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.225793 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d08f1c6-5e22-447b-8ba4-0205b37e47eb","Type":"ContainerStarted","Data":"06dd64350a2a55bfe80f0e806dfa344fe84b92ac680726348502d5fd05490290"} Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.249342 4876 scope.go:117] "RemoveContainer" containerID="e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.272497 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.278044 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57484c487-vwjxf"] Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.286077 4876 scope.go:117] "RemoveContainer" containerID="1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff" Dec 15 08:32:12 crc kubenswrapper[4876]: E1215 08:32:12.286787 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff\": container with ID starting with 1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff not found: ID does not exist" containerID="1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.286868 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff"} err="failed to get container status \"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff\": rpc error: code = NotFound desc = could not find container \"1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff\": container with ID starting with 1adbadf6f163d1bd6a9d3a3a565b50c325e08e8b4dc55b93b44f5135682231ff not found: ID does not exist" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.286911 4876 scope.go:117] "RemoveContainer" containerID="e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa" Dec 15 08:32:12 crc kubenswrapper[4876]: E1215 08:32:12.287329 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa\": container with ID starting with e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa not found: ID does not exist" containerID="e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.287393 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa"} err="failed to get container status \"e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa\": rpc error: code = NotFound desc = could not find container \"e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa\": container with ID starting with e9aca41164a6e994fed32e43f4ec2f3a7b8503050c8f17f34ce284de4c4ef1fa not found: ID does not exist" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.718576 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39086c72-7371-4c97-af08-cac754e741bf" 
path="/var/lib/kubelet/pods/39086c72-7371-4c97-af08-cac754e741bf/volumes" Dec 15 08:32:12 crc kubenswrapper[4876]: I1215 08:32:12.719708 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0aa962d-8969-416e-b841-8a1ad7c4b077" path="/var/lib/kubelet/pods/d0aa962d-8969-416e-b841-8a1ad7c4b077/volumes" Dec 15 08:32:13 crc kubenswrapper[4876]: I1215 08:32:13.235896 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0cb0b72-61fd-4427-ac5d-c91e21cac028","Type":"ContainerStarted","Data":"57bc6f031419ffb7c80bd7ecb3feac529d8a64c5e629f76c156a0a744bcdb648"} Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.136708 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:42 crc kubenswrapper[4876]: E1215 08:32:42.137680 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="dnsmasq-dns" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.137696 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="dnsmasq-dns" Dec 15 08:32:42 crc kubenswrapper[4876]: E1215 08:32:42.137728 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="init" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.137736 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="init" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.137887 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39086c72-7371-4c97-af08-cac754e741bf" containerName="dnsmasq-dns" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.139244 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.143525 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.210485 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.210562 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.210758 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q97jj\" (UniqueName: \"kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.312005 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.312121 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q97jj\" (UniqueName: \"kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.312197 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.312715 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.312797 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.330719 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-q97jj\" (UniqueName: \"kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj\") pod \"certified-operators-h2fml\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.459823 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:42 crc kubenswrapper[4876]: W1215 08:32:42.913581 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd54569fe_3b85_452c_bca5_ce79cb54d9b8.slice/crio-25c02efec1f062cc1c2121b1f5fffb33b95dff492e360d68bf69279d2765eecb WatchSource:0}: Error finding container 25c02efec1f062cc1c2121b1f5fffb33b95dff492e360d68bf69279d2765eecb: Status 404 returned error can't find the container with id 25c02efec1f062cc1c2121b1f5fffb33b95dff492e360d68bf69279d2765eecb Dec 15 08:32:42 crc kubenswrapper[4876]: I1215 08:32:42.914348 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:43 crc kubenswrapper[4876]: I1215 08:32:43.446040 4876 generic.go:334] "Generic (PLEG): container finished" podID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerID="449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd" exitCode=0 Dec 15 08:32:43 crc kubenswrapper[4876]: I1215 08:32:43.446158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerDied","Data":"449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd"} Dec 15 08:32:43 crc kubenswrapper[4876]: I1215 08:32:43.446453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerStarted","Data":"25c02efec1f062cc1c2121b1f5fffb33b95dff492e360d68bf69279d2765eecb"} Dec 15 08:32:44 crc kubenswrapper[4876]: I1215 08:32:44.454553 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerStarted","Data":"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998"} Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.462451 4876 generic.go:334] "Generic (PLEG): container finished" podID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerID="eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998" exitCode=0 Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.462517 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerDied","Data":"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998"} Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.467446 4876 generic.go:334] "Generic (PLEG): container finished" podID="c0cb0b72-61fd-4427-ac5d-c91e21cac028" containerID="57bc6f031419ffb7c80bd7ecb3feac529d8a64c5e629f76c156a0a744bcdb648" exitCode=0 Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.467517 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"c0cb0b72-61fd-4427-ac5d-c91e21cac028","Type":"ContainerDied","Data":"57bc6f031419ffb7c80bd7ecb3feac529d8a64c5e629f76c156a0a744bcdb648"} Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.470231 4876 generic.go:334] "Generic (PLEG): container finished" podID="4d08f1c6-5e22-447b-8ba4-0205b37e47eb" containerID="06dd64350a2a55bfe80f0e806dfa344fe84b92ac680726348502d5fd05490290" exitCode=0 Dec 15 08:32:45 crc kubenswrapper[4876]: I1215 08:32:45.470259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d08f1c6-5e22-447b-8ba4-0205b37e47eb","Type":"ContainerDied","Data":"06dd64350a2a55bfe80f0e806dfa344fe84b92ac680726348502d5fd05490290"} Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.478224 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerStarted","Data":"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57"} Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.480628 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c0cb0b72-61fd-4427-ac5d-c91e21cac028","Type":"ContainerStarted","Data":"0557c289c03341eaa4668ae83bab4c954623482edec38ecd8c2d21f2558f3951"} Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.480821 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.482387 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"4d08f1c6-5e22-447b-8ba4-0205b37e47eb","Type":"ContainerStarted","Data":"0fa28701f2be638e18defddc5cda1944ed7e7434f36638e2fee36bd092ce2428"} Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.482568 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.497781 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h2fml" podStartSLOduration=1.965494683 podStartE2EDuration="4.497760211s" podCreationTimestamp="2025-12-15 08:32:42 +0000 UTC" firstStartedPulling="2025-12-15 08:32:43.447498402 +0000 UTC m=+6089.018641313" lastFinishedPulling="2025-12-15 08:32:45.97976393 +0000 UTC m=+6091.550906841" observedRunningTime="2025-12-15 08:32:46.49772924 +0000 UTC m=+6092.068872171" watchObservedRunningTime="2025-12-15 08:32:46.497760211 +0000 UTC m=+6092.068903132" Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.527789 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.527766372 podStartE2EDuration="36.527766372s" podCreationTimestamp="2025-12-15 08:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:32:46.519464238 +0000 UTC m=+6092.090607159" watchObservedRunningTime="2025-12-15 08:32:46.527766372 +0000 UTC m=+6092.098909283" Dec 15 08:32:46 crc kubenswrapper[4876]: I1215 08:32:46.553173 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=35.553150038 podStartE2EDuration="35.553150038s" podCreationTimestamp="2025-12-15 08:32:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:32:46.547267888 +0000 UTC m=+6092.118410819" watchObservedRunningTime="2025-12-15 08:32:46.553150038 +0000 UTC m=+6092.124292979" Dec 15 08:32:52 crc kubenswrapper[4876]: I1215 08:32:52.461033 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:52 crc kubenswrapper[4876]: I1215 08:32:52.462461 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:52 crc kubenswrapper[4876]: I1215 08:32:52.502066 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:52 crc kubenswrapper[4876]: I1215 08:32:52.656762 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:52 crc kubenswrapper[4876]: I1215 08:32:52.742995 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:54 crc kubenswrapper[4876]: I1215 08:32:54.639157 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h2fml" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="registry-server" containerID="cri-o://2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57" gracePeriod=2 Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.017600 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.191158 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q97jj\" (UniqueName: \"kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj\") pod \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.191228 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content\") pod \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.191395 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities\") pod \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\" (UID: \"d54569fe-3b85-452c-bca5-ce79cb54d9b8\") " Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.192589 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities" (OuterVolumeSpecName: "utilities") pod "d54569fe-3b85-452c-bca5-ce79cb54d9b8" (UID: "d54569fe-3b85-452c-bca5-ce79cb54d9b8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.199715 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj" (OuterVolumeSpecName: "kube-api-access-q97jj") pod "d54569fe-3b85-452c-bca5-ce79cb54d9b8" (UID: "d54569fe-3b85-452c-bca5-ce79cb54d9b8"). InnerVolumeSpecName "kube-api-access-q97jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.251151 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d54569fe-3b85-452c-bca5-ce79cb54d9b8" (UID: "d54569fe-3b85-452c-bca5-ce79cb54d9b8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.293839 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.293872 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q97jj\" (UniqueName: \"kubernetes.io/projected/d54569fe-3b85-452c-bca5-ce79cb54d9b8-kube-api-access-q97jj\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.293886 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d54569fe-3b85-452c-bca5-ce79cb54d9b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.648697 4876 generic.go:334] "Generic (PLEG): container finished" podID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerID="2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57" exitCode=0 Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.648770 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h2fml" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.648774 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerDied","Data":"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57"} Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.649269 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h2fml" event={"ID":"d54569fe-3b85-452c-bca5-ce79cb54d9b8","Type":"ContainerDied","Data":"25c02efec1f062cc1c2121b1f5fffb33b95dff492e360d68bf69279d2765eecb"} Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.649293 4876 scope.go:117] "RemoveContainer" containerID="2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.670626 4876 scope.go:117] "RemoveContainer" containerID="eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.692592 4876 scope.go:117] "RemoveContainer" containerID="449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.694954 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.701004 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h2fml"] Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.721161 4876 scope.go:117] "RemoveContainer" containerID="2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57" Dec 15 08:32:55 crc kubenswrapper[4876]: E1215 08:32:55.721563 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57\": container with ID starting with 2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57 not found: ID does not exist" containerID="2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.721599 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57"} err="failed to get container status \"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57\": rpc error: code = NotFound desc = could not find container \"2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57\": container with ID starting with 2046ba1e2aa3a38d7e843e834cafc722c16b20343c1a04994832b7b340e90f57 not found: ID does not exist" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.721623 4876 scope.go:117] "RemoveContainer" containerID="eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998" Dec 15 08:32:55 crc kubenswrapper[4876]: E1215 08:32:55.721993 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998\": container with ID starting with eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998 not found: ID does not exist" containerID="eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.722044 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998"} err="failed to get container status \"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998\": rpc error: code = NotFound desc = could not find container \"eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998\": container with ID starting with eac8d0523eff4fb2f88945360028f00b71dcb0f932fac66e06cab5ffa898d998 not found: ID does not exist" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.722076 4876 scope.go:117] "RemoveContainer" containerID="449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd" Dec 15 08:32:55 crc kubenswrapper[4876]: E1215 08:32:55.722487 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd\": container with ID starting with 449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd not found: ID does not exist" containerID="449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd" Dec 15 08:32:55 crc kubenswrapper[4876]: I1215 08:32:55.722511 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd"} err="failed to get container status \"449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd\": rpc error: code = NotFound desc = could not find container \"449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd\": container with ID starting with 449f7a5373e4f6b4e99e1f3b7a7f46003b868e945d6fd9ccddb00712cd77c0fd not found: ID does not exist" Dec 15 08:32:56 crc kubenswrapper[4876]: I1215 08:32:56.714189 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" path="/var/lib/kubelet/pods/d54569fe-3b85-452c-bca5-ce79cb54d9b8/volumes" Dec 15 08:32:57 crc kubenswrapper[4876]: I1215 08:32:57.323262 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:32:57 crc kubenswrapper[4876]: I1215 08:32:57.323638 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:33:00 crc kubenswrapper[4876]: I1215 08:33:00.626176 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 15 08:33:01 crc kubenswrapper[4876]: I1215 08:33:01.638423 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.861359 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Dec 15 08:33:12 crc kubenswrapper[4876]: E1215 08:33:12.862324 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="extract-utilities" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.862344 4876 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="extract-utilities" Dec 15 08:33:12 crc kubenswrapper[4876]: E1215 08:33:12.862367 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="registry-server" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.862375 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="registry-server" Dec 15 08:33:12 crc kubenswrapper[4876]: E1215 08:33:12.862393 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="extract-content" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.862401 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="extract-content" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.862571 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d54569fe-3b85-452c-bca5-ce79cb54d9b8" containerName="registry-server" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.863087 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.865680 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qgf6m" Dec 15 08:33:12 crc kubenswrapper[4876]: I1215 08:33:12.866164 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 15 08:33:13 crc kubenswrapper[4876]: I1215 08:33:13.029942 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfkt4\" (UniqueName: \"kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4\") pod \"mariadb-client-1-default\" (UID: \"f892ee03-a007-41de-af67-2e2c21f796fa\") " pod="openstack/mariadb-client-1-default" Dec 15 08:33:13 crc kubenswrapper[4876]: I1215 08:33:13.132323 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfkt4\" (UniqueName: \"kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4\") pod \"mariadb-client-1-default\" (UID: \"f892ee03-a007-41de-af67-2e2c21f796fa\") " pod="openstack/mariadb-client-1-default" Dec 15 08:33:13 crc kubenswrapper[4876]: I1215 08:33:13.158244 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfkt4\" (UniqueName: \"kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4\") pod \"mariadb-client-1-default\" (UID: \"f892ee03-a007-41de-af67-2e2c21f796fa\") " pod="openstack/mariadb-client-1-default" Dec 15 08:33:13 crc kubenswrapper[4876]: I1215 08:33:13.184864 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 15 08:33:13 crc kubenswrapper[4876]: I1215 08:33:13.660250 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 15 08:33:13 crc kubenswrapper[4876]: W1215 08:33:13.665437 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf892ee03_a007_41de_af67_2e2c21f796fa.slice/crio-946ff72dda398e90a7d5ce7b32918bc70ab9bb4c213319d20ff9fc0369fc3e8e WatchSource:0}: Error finding container 946ff72dda398e90a7d5ce7b32918bc70ab9bb4c213319d20ff9fc0369fc3e8e: Status 404 returned error can't find the container with id 946ff72dda398e90a7d5ce7b32918bc70ab9bb4c213319d20ff9fc0369fc3e8e Dec 15 08:33:14 crc kubenswrapper[4876]: I1215 08:33:14.059442 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"f892ee03-a007-41de-af67-2e2c21f796fa","Type":"ContainerStarted","Data":"946ff72dda398e90a7d5ce7b32918bc70ab9bb4c213319d20ff9fc0369fc3e8e"} Dec 15 08:33:15 crc kubenswrapper[4876]: I1215 08:33:15.067981 4876 generic.go:334] "Generic (PLEG): container finished" podID="f892ee03-a007-41de-af67-2e2c21f796fa" containerID="3fca34e07a3c4c2b004ba4d2236d5a1cda5d00161c960358cb051fd55d163721" exitCode=0 Dec 15 08:33:15 crc kubenswrapper[4876]: I1215 08:33:15.068090 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"f892ee03-a007-41de-af67-2e2c21f796fa","Type":"ContainerDied","Data":"3fca34e07a3c4c2b004ba4d2236d5a1cda5d00161c960358cb051fd55d163721"} Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.424222 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.449439 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_f892ee03-a007-41de-af67-2e2c21f796fa/mariadb-client-1-default/0.log" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.474314 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.479655 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.586547 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfkt4\" (UniqueName: \"kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4\") pod \"f892ee03-a007-41de-af67-2e2c21f796fa\" (UID: \"f892ee03-a007-41de-af67-2e2c21f796fa\") " Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.591486 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4" (OuterVolumeSpecName: "kube-api-access-dfkt4") pod "f892ee03-a007-41de-af67-2e2c21f796fa" (UID: "f892ee03-a007-41de-af67-2e2c21f796fa"). InnerVolumeSpecName "kube-api-access-dfkt4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.688283 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfkt4\" (UniqueName: \"kubernetes.io/projected/f892ee03-a007-41de-af67-2e2c21f796fa-kube-api-access-dfkt4\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.723784 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f892ee03-a007-41de-af67-2e2c21f796fa" path="/var/lib/kubelet/pods/f892ee03-a007-41de-af67-2e2c21f796fa/volumes" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.870554 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Dec 15 08:33:16 crc kubenswrapper[4876]: E1215 08:33:16.870983 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f892ee03-a007-41de-af67-2e2c21f796fa" containerName="mariadb-client-1-default" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.871001 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f892ee03-a007-41de-af67-2e2c21f796fa" containerName="mariadb-client-1-default" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.871189 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f892ee03-a007-41de-af67-2e2c21f796fa" containerName="mariadb-client-1-default" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.871712 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.877631 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 15 08:33:16 crc kubenswrapper[4876]: I1215 08:33:16.993039 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x98qw\" (UniqueName: \"kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw\") pod \"mariadb-client-2-default\" (UID: \"895e7959-4798-45b6-a52a-84ea60a9b490\") " pod="openstack/mariadb-client-2-default" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.093919 4876 scope.go:117] "RemoveContainer" containerID="3fca34e07a3c4c2b004ba4d2236d5a1cda5d00161c960358cb051fd55d163721" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.094090 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.094241 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x98qw\" (UniqueName: \"kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw\") pod \"mariadb-client-2-default\" (UID: \"895e7959-4798-45b6-a52a-84ea60a9b490\") " pod="openstack/mariadb-client-2-default" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.119673 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x98qw\" (UniqueName: \"kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw\") pod \"mariadb-client-2-default\" (UID: \"895e7959-4798-45b6-a52a-84ea60a9b490\") " pod="openstack/mariadb-client-2-default" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.193347 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 15 08:33:17 crc kubenswrapper[4876]: I1215 08:33:17.672040 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 15 08:33:17 crc kubenswrapper[4876]: W1215 08:33:17.676407 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod895e7959_4798_45b6_a52a_84ea60a9b490.slice/crio-136f7f5bbb77a7ed22dbb5b12279f92015425c2aa743136d573a7b2de65081b2 WatchSource:0}: Error finding container 136f7f5bbb77a7ed22dbb5b12279f92015425c2aa743136d573a7b2de65081b2: Status 404 returned error can't find the container with id 136f7f5bbb77a7ed22dbb5b12279f92015425c2aa743136d573a7b2de65081b2 Dec 15 08:33:18 crc kubenswrapper[4876]: I1215 08:33:18.104476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"895e7959-4798-45b6-a52a-84ea60a9b490","Type":"ContainerStarted","Data":"77a85dfbaf51ada140c953f4bf3e5b514d6ad5502b58a8c14bbb2a47b256fcec"} Dec 15 08:33:18 crc kubenswrapper[4876]: I1215 08:33:18.104804 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"895e7959-4798-45b6-a52a-84ea60a9b490","Type":"ContainerStarted","Data":"136f7f5bbb77a7ed22dbb5b12279f92015425c2aa743136d573a7b2de65081b2"} Dec 15 08:33:18 crc kubenswrapper[4876]: I1215 08:33:18.115351 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=2.115336543 podStartE2EDuration="2.115336543s" podCreationTimestamp="2025-12-15 08:33:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:33:18.11488616 +0000 UTC m=+6123.686029061" watchObservedRunningTime="2025-12-15 08:33:18.115336543 +0000 UTC m=+6123.686479454" Dec 15 08:33:19 crc kubenswrapper[4876]: I1215 08:33:19.114265 4876 generic.go:334] "Generic (PLEG): container finished" podID="895e7959-4798-45b6-a52a-84ea60a9b490" containerID="77a85dfbaf51ada140c953f4bf3e5b514d6ad5502b58a8c14bbb2a47b256fcec" exitCode=1 Dec 15 08:33:19 crc kubenswrapper[4876]: I1215 08:33:19.114328 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"895e7959-4798-45b6-a52a-84ea60a9b490","Type":"ContainerDied","Data":"77a85dfbaf51ada140c953f4bf3e5b514d6ad5502b58a8c14bbb2a47b256fcec"} Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.489093 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.523552 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.528730 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.642774 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x98qw\" (UniqueName: \"kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw\") pod \"895e7959-4798-45b6-a52a-84ea60a9b490\" (UID: \"895e7959-4798-45b6-a52a-84ea60a9b490\") " Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.648773 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw" (OuterVolumeSpecName: "kube-api-access-x98qw") pod "895e7959-4798-45b6-a52a-84ea60a9b490" (UID: "895e7959-4798-45b6-a52a-84ea60a9b490"). InnerVolumeSpecName "kube-api-access-x98qw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.716458 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="895e7959-4798-45b6-a52a-84ea60a9b490" path="/var/lib/kubelet/pods/895e7959-4798-45b6-a52a-84ea60a9b490/volumes" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.744530 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x98qw\" (UniqueName: \"kubernetes.io/projected/895e7959-4798-45b6-a52a-84ea60a9b490-kube-api-access-x98qw\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.906021 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Dec 15 08:33:20 crc kubenswrapper[4876]: E1215 08:33:20.906342 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="895e7959-4798-45b6-a52a-84ea60a9b490" containerName="mariadb-client-2-default" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.906364 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="895e7959-4798-45b6-a52a-84ea60a9b490" containerName="mariadb-client-2-default" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.906518 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="895e7959-4798-45b6-a52a-84ea60a9b490" containerName="mariadb-client-2-default" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.906972 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Dec 15 08:33:20 crc kubenswrapper[4876]: I1215 08:33:20.959474 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.054551 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnw72\" (UniqueName: \"kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72\") pod \"mariadb-client-1\" (UID: \"287f4f86-610b-41ff-a216-fa78fc1572f5\") " pod="openstack/mariadb-client-1" Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.128015 4876 scope.go:117] "RemoveContainer" containerID="77a85dfbaf51ada140c953f4bf3e5b514d6ad5502b58a8c14bbb2a47b256fcec" Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.128064 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.156774 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnw72\" (UniqueName: \"kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72\") pod \"mariadb-client-1\" (UID: \"287f4f86-610b-41ff-a216-fa78fc1572f5\") " pod="openstack/mariadb-client-1" Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.183349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnw72\" (UniqueName: \"kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72\") pod \"mariadb-client-1\" (UID: \"287f4f86-610b-41ff-a216-fa78fc1572f5\") " pod="openstack/mariadb-client-1" Dec 15 08:33:21 crc kubenswrapper[4876]: I1215 08:33:21.268652 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Dec 15 08:33:22 crc kubenswrapper[4876]: I1215 08:33:22.306544 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Dec 15 08:33:22 crc kubenswrapper[4876]: W1215 08:33:22.307433 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod287f4f86_610b_41ff_a216_fa78fc1572f5.slice/crio-a915cb947eeacf4bdaf9935feede3172c5c820279686cd1a5d32048b1a833320 WatchSource:0}: Error finding container a915cb947eeacf4bdaf9935feede3172c5c820279686cd1a5d32048b1a833320: Status 404 returned error can't find the container with id a915cb947eeacf4bdaf9935feede3172c5c820279686cd1a5d32048b1a833320 Dec 15 08:33:23 crc kubenswrapper[4876]: I1215 08:33:23.266987 4876 generic.go:334] "Generic (PLEG): container finished" podID="287f4f86-610b-41ff-a216-fa78fc1572f5" containerID="0f6b5806f023f6337ba80157c89fdcb8c7acbbd27216588567037f03ff215724" exitCode=0 Dec 15 08:33:23 crc kubenswrapper[4876]: I1215 08:33:23.267039 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"287f4f86-610b-41ff-a216-fa78fc1572f5","Type":"ContainerDied","Data":"0f6b5806f023f6337ba80157c89fdcb8c7acbbd27216588567037f03ff215724"} Dec 15 08:33:23 crc kubenswrapper[4876]: I1215 08:33:23.267334 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"287f4f86-610b-41ff-a216-fa78fc1572f5","Type":"ContainerStarted","Data":"a915cb947eeacf4bdaf9935feede3172c5c820279686cd1a5d32048b1a833320"} Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.740487 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.760549 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_287f4f86-610b-41ff-a216-fa78fc1572f5/mariadb-client-1/0.log" Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.783579 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.790416 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.798027 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnw72\" (UniqueName: \"kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72\") pod \"287f4f86-610b-41ff-a216-fa78fc1572f5\" (UID: \"287f4f86-610b-41ff-a216-fa78fc1572f5\") " Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.806033 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72" (OuterVolumeSpecName: "kube-api-access-jnw72") pod "287f4f86-610b-41ff-a216-fa78fc1572f5" (UID: "287f4f86-610b-41ff-a216-fa78fc1572f5"). InnerVolumeSpecName "kube-api-access-jnw72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:24 crc kubenswrapper[4876]: I1215 08:33:24.899050 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnw72\" (UniqueName: \"kubernetes.io/projected/287f4f86-610b-41ff-a216-fa78fc1572f5-kube-api-access-jnw72\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.204724 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Dec 15 08:33:25 crc kubenswrapper[4876]: E1215 08:33:25.205067 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="287f4f86-610b-41ff-a216-fa78fc1572f5" containerName="mariadb-client-1" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.205078 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="287f4f86-610b-41ff-a216-fa78fc1572f5" containerName="mariadb-client-1" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.205218 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="287f4f86-610b-41ff-a216-fa78fc1572f5" containerName="mariadb-client-1" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.205694 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.228326 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.280984 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a915cb947eeacf4bdaf9935feede3172c5c820279686cd1a5d32048b1a833320" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.281033 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.304913 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t74g\" (UniqueName: \"kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g\") pod \"mariadb-client-4-default\" (UID: \"5709663f-d7a4-44b9-8f2a-32048c96454a\") " pod="openstack/mariadb-client-4-default" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.406717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t74g\" (UniqueName: \"kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g\") pod \"mariadb-client-4-default\" (UID: \"5709663f-d7a4-44b9-8f2a-32048c96454a\") " pod="openstack/mariadb-client-4-default" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.429313 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t74g\" (UniqueName: \"kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g\") pod \"mariadb-client-4-default\" (UID: \"5709663f-d7a4-44b9-8f2a-32048c96454a\") " pod="openstack/mariadb-client-4-default" Dec 15 08:33:25 crc kubenswrapper[4876]: I1215 08:33:25.531512 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 15 08:33:26 crc kubenswrapper[4876]: I1215 08:33:26.063244 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 15 08:33:26 crc kubenswrapper[4876]: I1215 08:33:26.289054 4876 generic.go:334] "Generic (PLEG): container finished" podID="5709663f-d7a4-44b9-8f2a-32048c96454a" containerID="0ed240638d9ecce3e64f4b2211456ac55febbf4f5b4ae68abe2e4bae192db206" exitCode=0 Dec 15 08:33:26 crc kubenswrapper[4876]: I1215 08:33:26.289192 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"5709663f-d7a4-44b9-8f2a-32048c96454a","Type":"ContainerDied","Data":"0ed240638d9ecce3e64f4b2211456ac55febbf4f5b4ae68abe2e4bae192db206"} Dec 15 08:33:26 crc kubenswrapper[4876]: I1215 08:33:26.289350 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"5709663f-d7a4-44b9-8f2a-32048c96454a","Type":"ContainerStarted","Data":"649e96a240cf20c1676256fce1a7038456ef732f3c2dce6a6419b3077df27327"} Dec 15 08:33:26 crc kubenswrapper[4876]: I1215 08:33:26.714555 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="287f4f86-610b-41ff-a216-fa78fc1572f5" path="/var/lib/kubelet/pods/287f4f86-610b-41ff-a216-fa78fc1572f5/volumes" Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.322745 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.324784 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.612973 4876 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.632898 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_5709663f-d7a4-44b9-8f2a-32048c96454a/mariadb-client-4-default/0.log" Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.641483 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t74g\" (UniqueName: \"kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g\") pod \"5709663f-d7a4-44b9-8f2a-32048c96454a\" (UID: \"5709663f-d7a4-44b9-8f2a-32048c96454a\") " Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.651983 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g" (OuterVolumeSpecName: "kube-api-access-8t74g") pod "5709663f-d7a4-44b9-8f2a-32048c96454a" (UID: "5709663f-d7a4-44b9-8f2a-32048c96454a"). InnerVolumeSpecName "kube-api-access-8t74g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.660510 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.668217 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Dec 15 08:33:27 crc kubenswrapper[4876]: I1215 08:33:27.743853 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t74g\" (UniqueName: \"kubernetes.io/projected/5709663f-d7a4-44b9-8f2a-32048c96454a-kube-api-access-8t74g\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:28 crc kubenswrapper[4876]: I1215 08:33:28.308459 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="649e96a240cf20c1676256fce1a7038456ef732f3c2dce6a6419b3077df27327" Dec 15 08:33:28 crc kubenswrapper[4876]: I1215 08:33:28.308552 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Dec 15 08:33:28 crc kubenswrapper[4876]: I1215 08:33:28.717921 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5709663f-d7a4-44b9-8f2a-32048c96454a" path="/var/lib/kubelet/pods/5709663f-d7a4-44b9-8f2a-32048c96454a/volumes" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.068684 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:29 crc kubenswrapper[4876]: E1215 08:33:29.069019 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5709663f-d7a4-44b9-8f2a-32048c96454a" containerName="mariadb-client-4-default" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.069036 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5709663f-d7a4-44b9-8f2a-32048c96454a" containerName="mariadb-client-4-default" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.069198 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5709663f-d7a4-44b9-8f2a-32048c96454a" containerName="mariadb-client-4-default" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.070301 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.075692 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.163744 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.163834 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.163920 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjfh9\" (UniqueName: \"kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.265326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.265412 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.265458 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjfh9\" (UniqueName: \"kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.266079 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.266626 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.291689 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-bjfh9\" (UniqueName: \"kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9\") pod \"redhat-operators-9x77v\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.395281 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:29 crc kubenswrapper[4876]: I1215 08:33:29.815467 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:30 crc kubenswrapper[4876]: I1215 08:33:30.324070 4876 generic.go:334] "Generic (PLEG): container finished" podID="74fc7ef3-4346-4921-894f-18f66c445434" containerID="94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e" exitCode=0 Dec 15 08:33:30 crc kubenswrapper[4876]: I1215 08:33:30.324198 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerDied","Data":"94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e"} Dec 15 08:33:30 crc kubenswrapper[4876]: I1215 08:33:30.324405 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerStarted","Data":"090d917cee369dc6a7610f9044e2ee64ab48d23cf7b71ab435916be68a57e044"} Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.372372 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.373391 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.380771 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qgf6m" Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.386752 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.400154 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7crg\" (UniqueName: \"kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg\") pod \"mariadb-client-5-default\" (UID: \"39e15ba5-8af5-4f81-9298-c80f8e6b22b5\") " pod="openstack/mariadb-client-5-default" Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.501075 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7crg\" (UniqueName: \"kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg\") pod \"mariadb-client-5-default\" (UID: \"39e15ba5-8af5-4f81-9298-c80f8e6b22b5\") " pod="openstack/mariadb-client-5-default" Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.519905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7crg\" (UniqueName: \"kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg\") pod \"mariadb-client-5-default\" (UID: \"39e15ba5-8af5-4f81-9298-c80f8e6b22b5\") " pod="openstack/mariadb-client-5-default" Dec 15 08:33:31 crc kubenswrapper[4876]: I1215 08:33:31.698509 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 15 08:33:32 crc kubenswrapper[4876]: I1215 08:33:32.199174 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 15 08:33:32 crc kubenswrapper[4876]: I1215 08:33:32.339879 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"39e15ba5-8af5-4f81-9298-c80f8e6b22b5","Type":"ContainerStarted","Data":"a086ba6eaf33ebb323adc804ea41c8db56708e054eb010a777cd4688e30a18b9"} Dec 15 08:33:33 crc kubenswrapper[4876]: I1215 08:33:33.348534 4876 generic.go:334] "Generic (PLEG): container finished" podID="74fc7ef3-4346-4921-894f-18f66c445434" containerID="6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97" exitCode=0 Dec 15 08:33:33 crc kubenswrapper[4876]: I1215 08:33:33.348648 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerDied","Data":"6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97"} Dec 15 08:33:33 crc kubenswrapper[4876]: I1215 08:33:33.350678 4876 generic.go:334] "Generic (PLEG): container finished" podID="39e15ba5-8af5-4f81-9298-c80f8e6b22b5" containerID="0c097805664b721773bfd601f09489a4d6473be5895dc26c236072f8140b1f55" exitCode=0 Dec 15 08:33:33 crc kubenswrapper[4876]: I1215 08:33:33.350714 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"39e15ba5-8af5-4f81-9298-c80f8e6b22b5","Type":"ContainerDied","Data":"0c097805664b721773bfd601f09489a4d6473be5895dc26c236072f8140b1f55"} Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.359315 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerStarted","Data":"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098"} Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.381861 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9x77v" podStartSLOduration=1.856117848 podStartE2EDuration="5.381841712s" podCreationTimestamp="2025-12-15 08:33:29 +0000 UTC" firstStartedPulling="2025-12-15 08:33:30.326488431 +0000 UTC m=+6135.897631342" lastFinishedPulling="2025-12-15 08:33:33.852212285 +0000 UTC m=+6139.423355206" observedRunningTime="2025-12-15 08:33:34.378719956 +0000 UTC m=+6139.949862877" watchObservedRunningTime="2025-12-15 08:33:34.381841712 +0000 UTC m=+6139.952984623" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.697245 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.718277 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_39e15ba5-8af5-4f81-9298-c80f8e6b22b5/mariadb-client-5-default/0.log" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.745647 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.751822 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.844812 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7crg\" (UniqueName: \"kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg\") pod \"39e15ba5-8af5-4f81-9298-c80f8e6b22b5\" (UID: \"39e15ba5-8af5-4f81-9298-c80f8e6b22b5\") " Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.852665 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg" (OuterVolumeSpecName: "kube-api-access-d7crg") pod "39e15ba5-8af5-4f81-9298-c80f8e6b22b5" (UID: "39e15ba5-8af5-4f81-9298-c80f8e6b22b5"). InnerVolumeSpecName "kube-api-access-d7crg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.863141 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Dec 15 08:33:34 crc kubenswrapper[4876]: E1215 08:33:34.863909 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39e15ba5-8af5-4f81-9298-c80f8e6b22b5" containerName="mariadb-client-5-default" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.863932 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39e15ba5-8af5-4f81-9298-c80f8e6b22b5" containerName="mariadb-client-5-default" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.864269 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39e15ba5-8af5-4f81-9298-c80f8e6b22b5" containerName="mariadb-client-5-default" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.864891 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.871815 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 15 08:33:34 crc kubenswrapper[4876]: I1215 08:33:34.946535 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7crg\" (UniqueName: \"kubernetes.io/projected/39e15ba5-8af5-4f81-9298-c80f8e6b22b5-kube-api-access-d7crg\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.047961 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xldrf\" (UniqueName: \"kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf\") pod \"mariadb-client-6-default\" (UID: \"89bd2284-e10a-46f4-938e-b5d500526b8f\") " pod="openstack/mariadb-client-6-default" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.149094 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xldrf\" (UniqueName: \"kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf\") pod \"mariadb-client-6-default\" (UID: \"89bd2284-e10a-46f4-938e-b5d500526b8f\") " pod="openstack/mariadb-client-6-default" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.164977 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xldrf\" (UniqueName: \"kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf\") pod \"mariadb-client-6-default\" (UID: \"89bd2284-e10a-46f4-938e-b5d500526b8f\") " pod="openstack/mariadb-client-6-default" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.206502 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.367228 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a086ba6eaf33ebb323adc804ea41c8db56708e054eb010a777cd4688e30a18b9" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.367337 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Dec 15 08:33:35 crc kubenswrapper[4876]: I1215 08:33:35.770498 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 15 08:33:36 crc kubenswrapper[4876]: I1215 08:33:36.375782 4876 generic.go:334] "Generic (PLEG): container finished" podID="89bd2284-e10a-46f4-938e-b5d500526b8f" containerID="835f793f77bf22dceb559b2a8ba4818d6e7bd2f69a3d8db1d6a7c4b55248c026" exitCode=1 Dec 15 08:33:36 crc kubenswrapper[4876]: I1215 08:33:36.375838 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"89bd2284-e10a-46f4-938e-b5d500526b8f","Type":"ContainerDied","Data":"835f793f77bf22dceb559b2a8ba4818d6e7bd2f69a3d8db1d6a7c4b55248c026"} Dec 15 08:33:36 crc kubenswrapper[4876]: I1215 08:33:36.376138 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"89bd2284-e10a-46f4-938e-b5d500526b8f","Type":"ContainerStarted","Data":"17b8119bcc47a65f4250bab03798e5e7a4e75108357728857a167353e1114da8"} Dec 15 08:33:36 crc kubenswrapper[4876]: I1215 08:33:36.714071 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39e15ba5-8af5-4f81-9298-c80f8e6b22b5" path="/var/lib/kubelet/pods/39e15ba5-8af5-4f81-9298-c80f8e6b22b5/volumes" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.757848 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.778556 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_89bd2284-e10a-46f4-938e-b5d500526b8f/mariadb-client-6-default/0.log" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.803650 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.828559 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.893177 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xldrf\" (UniqueName: \"kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf\") pod \"89bd2284-e10a-46f4-938e-b5d500526b8f\" (UID: \"89bd2284-e10a-46f4-938e-b5d500526b8f\") " Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.904686 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf" (OuterVolumeSpecName: "kube-api-access-xldrf") pod "89bd2284-e10a-46f4-938e-b5d500526b8f" (UID: "89bd2284-e10a-46f4-938e-b5d500526b8f"). InnerVolumeSpecName "kube-api-access-xldrf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.931017 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Dec 15 08:33:37 crc kubenswrapper[4876]: E1215 08:33:37.931324 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89bd2284-e10a-46f4-938e-b5d500526b8f" containerName="mariadb-client-6-default" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.931341 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="89bd2284-e10a-46f4-938e-b5d500526b8f" containerName="mariadb-client-6-default" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.931480 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="89bd2284-e10a-46f4-938e-b5d500526b8f" containerName="mariadb-client-6-default" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.931998 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.941941 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 15 08:33:37 crc kubenswrapper[4876]: I1215 08:33:37.995010 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xldrf\" (UniqueName: \"kubernetes.io/projected/89bd2284-e10a-46f4-938e-b5d500526b8f-kube-api-access-xldrf\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.096897 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw27j\" (UniqueName: \"kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j\") pod \"mariadb-client-7-default\" (UID: \"a1f250fc-5816-4123-a2b2-2ee806966ff2\") " pod="openstack/mariadb-client-7-default" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.198199 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw27j\" (UniqueName: \"kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j\") pod \"mariadb-client-7-default\" (UID: \"a1f250fc-5816-4123-a2b2-2ee806966ff2\") " pod="openstack/mariadb-client-7-default" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.220605 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw27j\" (UniqueName: \"kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j\") pod \"mariadb-client-7-default\" (UID: \"a1f250fc-5816-4123-a2b2-2ee806966ff2\") " pod="openstack/mariadb-client-7-default" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.253048 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.390692 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17b8119bcc47a65f4250bab03798e5e7a4e75108357728857a167353e1114da8" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.390767 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.527537 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 15 08:33:38 crc kubenswrapper[4876]: W1215 08:33:38.531704 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1f250fc_5816_4123_a2b2_2ee806966ff2.slice/crio-f2ffcaa5950b31021b50c9c5cc0474cbe156ecc9859c215d515f3b3ec0853dfd WatchSource:0}: Error finding container f2ffcaa5950b31021b50c9c5cc0474cbe156ecc9859c215d515f3b3ec0853dfd: Status 404 returned error can't find the container with id f2ffcaa5950b31021b50c9c5cc0474cbe156ecc9859c215d515f3b3ec0853dfd Dec 15 08:33:38 crc kubenswrapper[4876]: I1215 08:33:38.715576 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89bd2284-e10a-46f4-938e-b5d500526b8f" path="/var/lib/kubelet/pods/89bd2284-e10a-46f4-938e-b5d500526b8f/volumes" Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.396710 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.398830 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.405663 4876 generic.go:334] "Generic (PLEG): container finished" podID="a1f250fc-5816-4123-a2b2-2ee806966ff2" containerID="d194bd3f33c3f04cf41076d25fd958168aa5bd0e55f81fe56c86285d6c3d345e" exitCode=0 Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.405706 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"a1f250fc-5816-4123-a2b2-2ee806966ff2","Type":"ContainerDied","Data":"d194bd3f33c3f04cf41076d25fd958168aa5bd0e55f81fe56c86285d6c3d345e"} Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.405740 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"a1f250fc-5816-4123-a2b2-2ee806966ff2","Type":"ContainerStarted","Data":"f2ffcaa5950b31021b50c9c5cc0474cbe156ecc9859c215d515f3b3ec0853dfd"} Dec 15 08:33:39 crc kubenswrapper[4876]: I1215 08:33:39.446678 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.463544 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.513400 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.758426 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.775390 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_a1f250fc-5816-4123-a2b2-2ee806966ff2/mariadb-client-7-default/0.log" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.803568 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.810666 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.922754 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Dec 15 08:33:40 crc kubenswrapper[4876]: E1215 08:33:40.923185 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f250fc-5816-4123-a2b2-2ee806966ff2" containerName="mariadb-client-7-default" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.923207 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f250fc-5816-4123-a2b2-2ee806966ff2" containerName="mariadb-client-7-default" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.923399 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1f250fc-5816-4123-a2b2-2ee806966ff2" containerName="mariadb-client-7-default" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.923991 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.931572 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.942028 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lw27j\" (UniqueName: \"kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j\") pod \"a1f250fc-5816-4123-a2b2-2ee806966ff2\" (UID: \"a1f250fc-5816-4123-a2b2-2ee806966ff2\") " Dec 15 08:33:40 crc kubenswrapper[4876]: I1215 08:33:40.948329 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j" (OuterVolumeSpecName: "kube-api-access-lw27j") pod "a1f250fc-5816-4123-a2b2-2ee806966ff2" (UID: "a1f250fc-5816-4123-a2b2-2ee806966ff2"). InnerVolumeSpecName "kube-api-access-lw27j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.044571 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2thr\" (UniqueName: \"kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr\") pod \"mariadb-client-2\" (UID: \"5ca1c271-a7f7-4acf-a81c-4a15627bb170\") " pod="openstack/mariadb-client-2" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.044676 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lw27j\" (UniqueName: \"kubernetes.io/projected/a1f250fc-5816-4123-a2b2-2ee806966ff2-kube-api-access-lw27j\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.146878 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2thr\" (UniqueName: \"kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr\") pod \"mariadb-client-2\" (UID: \"5ca1c271-a7f7-4acf-a81c-4a15627bb170\") " pod="openstack/mariadb-client-2" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.163656 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2thr\" (UniqueName: \"kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr\") pod \"mariadb-client-2\" (UID: \"5ca1c271-a7f7-4acf-a81c-4a15627bb170\") " pod="openstack/mariadb-client-2" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.246182 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.451892 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.452213 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2ffcaa5950b31021b50c9c5cc0474cbe156ecc9859c215d515f3b3ec0853dfd" Dec 15 08:33:41 crc kubenswrapper[4876]: I1215 08:33:41.789783 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.460334 4876 generic.go:334] "Generic (PLEG): container finished" podID="5ca1c271-a7f7-4acf-a81c-4a15627bb170" containerID="5b7aac28ee435bf7f826b75933e6f5c0a202e75a686db7bf9144479a5d8e24f6" exitCode=0 Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.460495 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"5ca1c271-a7f7-4acf-a81c-4a15627bb170","Type":"ContainerDied","Data":"5b7aac28ee435bf7f826b75933e6f5c0a202e75a686db7bf9144479a5d8e24f6"} Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.460783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"5ca1c271-a7f7-4acf-a81c-4a15627bb170","Type":"ContainerStarted","Data":"09f5811001ce6472b131680b4ac6558ec1f9ec7aef1b6a0e3c853ff55244bafd"} Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.460850 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9x77v" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="registry-server" containerID="cri-o://70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098" gracePeriod=2 Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.715951 4876 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="a1f250fc-5816-4123-a2b2-2ee806966ff2" path="/var/lib/kubelet/pods/a1f250fc-5816-4123-a2b2-2ee806966ff2/volumes" Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.863492 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.977453 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content\") pod \"74fc7ef3-4346-4921-894f-18f66c445434\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.977586 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities\") pod \"74fc7ef3-4346-4921-894f-18f66c445434\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.977624 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjfh9\" (UniqueName: \"kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9\") pod \"74fc7ef3-4346-4921-894f-18f66c445434\" (UID: \"74fc7ef3-4346-4921-894f-18f66c445434\") " Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.978865 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities" (OuterVolumeSpecName: "utilities") pod "74fc7ef3-4346-4921-894f-18f66c445434" (UID: "74fc7ef3-4346-4921-894f-18f66c445434"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:33:42 crc kubenswrapper[4876]: I1215 08:33:42.989330 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9" (OuterVolumeSpecName: "kube-api-access-bjfh9") pod "74fc7ef3-4346-4921-894f-18f66c445434" (UID: "74fc7ef3-4346-4921-894f-18f66c445434"). InnerVolumeSpecName "kube-api-access-bjfh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.079286 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.079320 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjfh9\" (UniqueName: \"kubernetes.io/projected/74fc7ef3-4346-4921-894f-18f66c445434-kube-api-access-bjfh9\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.469675 4876 generic.go:334] "Generic (PLEG): container finished" podID="74fc7ef3-4346-4921-894f-18f66c445434" containerID="70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098" exitCode=0 Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.469761 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9x77v" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.469777 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerDied","Data":"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098"} Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.469824 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9x77v" event={"ID":"74fc7ef3-4346-4921-894f-18f66c445434","Type":"ContainerDied","Data":"090d917cee369dc6a7610f9044e2ee64ab48d23cf7b71ab435916be68a57e044"} Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.469852 4876 scope.go:117] "RemoveContainer" containerID="70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.503607 4876 scope.go:117] "RemoveContainer" containerID="6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.524623 4876 scope.go:117] "RemoveContainer" containerID="94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.563827 4876 scope.go:117] "RemoveContainer" containerID="70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098" Dec 15 08:33:43 crc kubenswrapper[4876]: E1215 08:33:43.565472 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098\": container with ID starting with 70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098 not found: ID does not exist" containerID="70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.565507 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098"} err="failed to get container status \"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098\": rpc error: code = NotFound desc = could not find container \"70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098\": container with ID starting with 70417548d2de7913fb5ef7bf00007cebe05ae5888772f3aae52d4f74e298c098 not found: ID does not exist" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.565537 4876 scope.go:117] "RemoveContainer" containerID="6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97" Dec 15 08:33:43 crc kubenswrapper[4876]: E1215 08:33:43.565862 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97\": container with ID starting with 6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97 not found: ID does not exist" containerID="6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.565880 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97"} err="failed to get container status \"6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97\": rpc error: code = NotFound desc = could not find container 
\"6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97\": container with ID starting with 6c9f1b979a1cefb716c2021c706236c368a416c7b7d87a8734274bc1e362ea97 not found: ID does not exist" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.565892 4876 scope.go:117] "RemoveContainer" containerID="94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e" Dec 15 08:33:43 crc kubenswrapper[4876]: E1215 08:33:43.566094 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e\": container with ID starting with 94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e not found: ID does not exist" containerID="94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.566170 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e"} err="failed to get container status \"94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e\": rpc error: code = NotFound desc = could not find container \"94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e\": container with ID starting with 94f40ccad55c8556b9e658c9488b5d46c9aef1054b17d3412674633e8ae7bc5e not found: ID does not exist" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.874540 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.891524 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_5ca1c271-a7f7-4acf-a81c-4a15627bb170/mariadb-client-2/0.log" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.891927 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2thr\" (UniqueName: \"kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr\") pod \"5ca1c271-a7f7-4acf-a81c-4a15627bb170\" (UID: \"5ca1c271-a7f7-4acf-a81c-4a15627bb170\") " Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.896168 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr" (OuterVolumeSpecName: "kube-api-access-z2thr") pod "5ca1c271-a7f7-4acf-a81c-4a15627bb170" (UID: "5ca1c271-a7f7-4acf-a81c-4a15627bb170"). InnerVolumeSpecName "kube-api-access-z2thr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.920752 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.927174 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Dec 15 08:33:43 crc kubenswrapper[4876]: I1215 08:33:43.993221 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2thr\" (UniqueName: \"kubernetes.io/projected/5ca1c271-a7f7-4acf-a81c-4a15627bb170-kube-api-access-z2thr\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.287633 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74fc7ef3-4346-4921-894f-18f66c445434" (UID: "74fc7ef3-4346-4921-894f-18f66c445434"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.297826 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74fc7ef3-4346-4921-894f-18f66c445434-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.408424 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.415601 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9x77v"] Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.485841 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09f5811001ce6472b131680b4ac6558ec1f9ec7aef1b6a0e3c853ff55244bafd" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.485901 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.716161 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ca1c271-a7f7-4acf-a81c-4a15627bb170" path="/var/lib/kubelet/pods/5ca1c271-a7f7-4acf-a81c-4a15627bb170/volumes" Dec 15 08:33:44 crc kubenswrapper[4876]: I1215 08:33:44.716863 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74fc7ef3-4346-4921-894f-18f66c445434" path="/var/lib/kubelet/pods/74fc7ef3-4346-4921-894f-18f66c445434/volumes" Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.323069 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.323680 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.323731 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.324404 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.324465 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd" gracePeriod=600 Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.577132 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd" exitCode=0 Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.577181 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd"} Dec 15 08:33:57 crc kubenswrapper[4876]: I1215 08:33:57.577509 4876 scope.go:117] "RemoveContainer" containerID="e2bf4162d3b45fe2fcce64c1dfff9f9a2919361e38403d910b59b1f562931a78" Dec 15 08:33:58 crc kubenswrapper[4876]: I1215 08:33:58.588393 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a"} Dec 15 08:35:23 crc kubenswrapper[4876]: I1215 08:35:23.513410 4876 scope.go:117] "RemoveContainer" 
containerID="7926936bc9ba6368a30dd972ab4e1b4c63238bf7c0b05e7328d0258c104e6253" Dec 15 08:35:57 crc kubenswrapper[4876]: I1215 08:35:57.323569 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:35:57 crc kubenswrapper[4876]: I1215 08:35:57.324328 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:36:27 crc kubenswrapper[4876]: I1215 08:36:27.322545 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:36:27 crc kubenswrapper[4876]: I1215 08:36:27.323123 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.500595 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:29 crc kubenswrapper[4876]: E1215 08:36:29.501951 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ca1c271-a7f7-4acf-a81c-4a15627bb170" containerName="mariadb-client-2" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.501986 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ca1c271-a7f7-4acf-a81c-4a15627bb170" containerName="mariadb-client-2" Dec 15 08:36:29 crc kubenswrapper[4876]: E1215 08:36:29.502026 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="registry-server" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.502035 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="registry-server" Dec 15 08:36:29 crc kubenswrapper[4876]: E1215 08:36:29.502056 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="extract-utilities" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.502065 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="extract-utilities" Dec 15 08:36:29 crc kubenswrapper[4876]: E1215 08:36:29.502091 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="extract-content" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.502119 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="extract-content" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.502342 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="74fc7ef3-4346-4921-894f-18f66c445434" containerName="registry-server" Dec 15 
08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.502372 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ca1c271-a7f7-4acf-a81c-4a15627bb170" containerName="mariadb-client-2" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.504187 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.507540 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.621186 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.621245 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.621488 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csgj4\" (UniqueName: \"kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.723244 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csgj4\" (UniqueName: \"kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.723330 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.723368 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.724043 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.724055 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.744456 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csgj4\" (UniqueName: \"kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4\") pod \"community-operators-2xcfq\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:29 crc kubenswrapper[4876]: I1215 08:36:29.826969 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:30 crc kubenswrapper[4876]: I1215 08:36:30.305962 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:30 crc kubenswrapper[4876]: E1215 08:36:30.627742 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70e60be3_70c1_46b3_a7e1_3072c367dcc4.slice/crio-conmon-343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d.scope\": RecentStats: unable to find data in memory cache]" Dec 15 08:36:30 crc kubenswrapper[4876]: I1215 08:36:30.947275 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerID="343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d" exitCode=0 Dec 15 08:36:30 crc kubenswrapper[4876]: I1215 08:36:30.947318 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerDied","Data":"343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d"} Dec 15 08:36:30 crc kubenswrapper[4876]: I1215 08:36:30.947344 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerStarted","Data":"6ea4622fda3f61978aa463310c081e6478a9526fef9dd540d0135a22dfcd710c"} Dec 15 08:36:30 crc kubenswrapper[4876]: I1215 08:36:30.949663 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:36:31 crc kubenswrapper[4876]: I1215 08:36:31.964211 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerID="acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61" exitCode=0 Dec 15 08:36:31 crc kubenswrapper[4876]: I1215 08:36:31.964286 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerDied","Data":"acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61"} Dec 15 08:36:32 crc kubenswrapper[4876]: I1215 08:36:32.973144 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerStarted","Data":"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6"} Dec 15 08:36:32 crc kubenswrapper[4876]: I1215 08:36:32.994286 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-2xcfq" podStartSLOduration=2.57166412 podStartE2EDuration="3.994270256s" podCreationTimestamp="2025-12-15 08:36:29 +0000 UTC" firstStartedPulling="2025-12-15 08:36:30.949323219 +0000 UTC m=+6316.520466130" lastFinishedPulling="2025-12-15 08:36:32.371929355 +0000 UTC m=+6317.943072266" observedRunningTime="2025-12-15 08:36:32.988334294 +0000 UTC m=+6318.559477205" watchObservedRunningTime="2025-12-15 08:36:32.994270256 +0000 UTC m=+6318.565413167" Dec 15 08:36:39 crc kubenswrapper[4876]: I1215 08:36:39.828240 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:39 crc kubenswrapper[4876]: I1215 08:36:39.828671 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:39 crc kubenswrapper[4876]: I1215 08:36:39.881691 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:40 crc kubenswrapper[4876]: I1215 08:36:40.072178 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:40 crc kubenswrapper[4876]: I1215 08:36:40.117186 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:42 crc kubenswrapper[4876]: I1215 08:36:42.033563 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2xcfq" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="registry-server" containerID="cri-o://f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6" gracePeriod=2 Dec 15 08:36:42 crc kubenswrapper[4876]: I1215 08:36:42.997077 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.050555 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerID="f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6" exitCode=0 Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.050602 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerDied","Data":"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6"} Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.050631 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xcfq" event={"ID":"70e60be3-70c1-46b3-a7e1-3072c367dcc4","Type":"ContainerDied","Data":"6ea4622fda3f61978aa463310c081e6478a9526fef9dd540d0135a22dfcd710c"} Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.050653 4876 scope.go:117] "RemoveContainer" containerID="f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.050788 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2xcfq" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.090577 4876 scope.go:117] "RemoveContainer" containerID="acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.123811 4876 scope.go:117] "RemoveContainer" containerID="343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.135880 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content\") pod \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.135967 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csgj4\" (UniqueName: \"kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4\") pod \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.136057 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities\") pod \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\" (UID: \"70e60be3-70c1-46b3-a7e1-3072c367dcc4\") " Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.137029 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities" (OuterVolumeSpecName: "utilities") pod "70e60be3-70c1-46b3-a7e1-3072c367dcc4" (UID: "70e60be3-70c1-46b3-a7e1-3072c367dcc4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.142490 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4" (OuterVolumeSpecName: "kube-api-access-csgj4") pod "70e60be3-70c1-46b3-a7e1-3072c367dcc4" (UID: "70e60be3-70c1-46b3-a7e1-3072c367dcc4"). InnerVolumeSpecName "kube-api-access-csgj4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.153801 4876 scope.go:117] "RemoveContainer" containerID="f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6" Dec 15 08:36:43 crc kubenswrapper[4876]: E1215 08:36:43.154823 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6\": container with ID starting with f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6 not found: ID does not exist" containerID="f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.154858 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6"} err="failed to get container status \"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6\": rpc error: code = NotFound desc = could not find container \"f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6\": container with ID starting with f66f30791dc5a2be6db9d61d30fa91f1258c2a6a7bae43fe7c929bae756c35f6 not found: ID does not exist" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.154878 4876 scope.go:117] "RemoveContainer" containerID="acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61" Dec 15 08:36:43 crc kubenswrapper[4876]: E1215 08:36:43.155158 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61\": container with ID starting with acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61 not found: ID does not exist" containerID="acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.155180 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61"} err="failed to get container status \"acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61\": rpc error: code = NotFound desc = could not find container \"acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61\": container with ID starting with acfeaca31dde3968c703c863f627989c088dd0b1cf1969e710c627d6e46d7b61 not found: ID does not exist" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.155193 4876 scope.go:117] "RemoveContainer" containerID="343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d" Dec 15 08:36:43 crc kubenswrapper[4876]: E1215 08:36:43.155548 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d\": container with ID starting with 343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d not found: ID does not exist" containerID="343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.155568 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d"} err="failed to get container status \"343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d\": rpc error: code = NotFound desc = could not 
find container \"343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d\": container with ID starting with 343d318c80dd7b24ce1a05d1546dc16eaa752dcdd8c7b51a25ef68639163bd2d not found: ID does not exist" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.190221 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70e60be3-70c1-46b3-a7e1-3072c367dcc4" (UID: "70e60be3-70c1-46b3-a7e1-3072c367dcc4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.238122 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.238154 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70e60be3-70c1-46b3-a7e1-3072c367dcc4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.238166 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csgj4\" (UniqueName: \"kubernetes.io/projected/70e60be3-70c1-46b3-a7e1-3072c367dcc4-kube-api-access-csgj4\") on node \"crc\" DevicePath \"\"" Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.378786 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:43 crc kubenswrapper[4876]: I1215 08:36:43.385825 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2xcfq"] Dec 15 08:36:44 crc kubenswrapper[4876]: I1215 08:36:44.713650 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" path="/var/lib/kubelet/pods/70e60be3-70c1-46b3-a7e1-3072c367dcc4/volumes" Dec 15 08:36:57 crc kubenswrapper[4876]: I1215 08:36:57.322626 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:36:57 crc kubenswrapper[4876]: I1215 08:36:57.323060 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:36:57 crc kubenswrapper[4876]: I1215 08:36:57.323130 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:36:57 crc kubenswrapper[4876]: I1215 08:36:57.323773 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:36:57 crc kubenswrapper[4876]: I1215 08:36:57.323837 4876 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" gracePeriod=600 Dec 15 08:36:57 crc kubenswrapper[4876]: E1215 08:36:57.463420 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:36:58 crc kubenswrapper[4876]: I1215 08:36:58.154578 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" exitCode=0 Dec 15 08:36:58 crc kubenswrapper[4876]: I1215 08:36:58.154645 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a"} Dec 15 08:36:58 crc kubenswrapper[4876]: I1215 08:36:58.154702 4876 scope.go:117] "RemoveContainer" containerID="1eab63760fd711d89164cf5cecc7ad58b6c061babe54b71fb0d87b4bc12245dd" Dec 15 08:36:58 crc kubenswrapper[4876]: I1215 08:36:58.155319 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:36:58 crc kubenswrapper[4876]: E1215 08:36:58.155579 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:37:08 crc kubenswrapper[4876]: I1215 08:37:08.706480 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:37:08 crc kubenswrapper[4876]: E1215 08:37:08.709353 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:37:19 crc kubenswrapper[4876]: I1215 08:37:19.705991 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:37:19 crc kubenswrapper[4876]: E1215 08:37:19.707073 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:37:30 crc kubenswrapper[4876]: I1215 08:37:30.706476 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:37:30 crc kubenswrapper[4876]: E1215 08:37:30.707487 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:37:44 crc kubenswrapper[4876]: I1215 08:37:44.711212 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:37:44 crc kubenswrapper[4876]: E1215 08:37:44.713241 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:37:56 crc kubenswrapper[4876]: I1215 08:37:56.705758 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:37:56 crc kubenswrapper[4876]: E1215 08:37:56.706688 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:38:11 crc kubenswrapper[4876]: I1215 08:38:11.706025 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:38:11 crc kubenswrapper[4876]: E1215 08:38:11.707136 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:38:26 crc kubenswrapper[4876]: I1215 08:38:26.706381 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:38:26 crc kubenswrapper[4876]: E1215 08:38:26.707209 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:38:38 crc kubenswrapper[4876]: I1215 08:38:38.705802 4876 scope.go:117] "RemoveContainer" 
containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:38:38 crc kubenswrapper[4876]: E1215 08:38:38.706857 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:38:49 crc kubenswrapper[4876]: I1215 08:38:49.705907 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:38:49 crc kubenswrapper[4876]: E1215 08:38:49.706670 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:39:04 crc kubenswrapper[4876]: I1215 08:39:04.710315 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:39:04 crc kubenswrapper[4876]: E1215 08:39:04.712604 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:39:15 crc kubenswrapper[4876]: I1215 08:39:15.705678 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:39:15 crc kubenswrapper[4876]: E1215 08:39:15.706465 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:39:23 crc kubenswrapper[4876]: I1215 08:39:23.629921 4876 scope.go:117] "RemoveContainer" containerID="0f6b5806f023f6337ba80157c89fdcb8c7acbbd27216588567037f03ff215724" Dec 15 08:39:29 crc kubenswrapper[4876]: I1215 08:39:29.705528 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:39:29 crc kubenswrapper[4876]: E1215 08:39:29.707244 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:39:41 crc kubenswrapper[4876]: I1215 08:39:41.705866 4876 scope.go:117] "RemoveContainer" 
containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:39:41 crc kubenswrapper[4876]: E1215 08:39:41.706708 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:39:56 crc kubenswrapper[4876]: I1215 08:39:56.705756 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:39:56 crc kubenswrapper[4876]: E1215 08:39:56.706596 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:40:08 crc kubenswrapper[4876]: I1215 08:40:08.705403 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:40:08 crc kubenswrapper[4876]: E1215 08:40:08.706231 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:40:21 crc kubenswrapper[4876]: I1215 08:40:21.706278 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:40:21 crc kubenswrapper[4876]: E1215 08:40:21.707301 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:40:23 crc kubenswrapper[4876]: I1215 08:40:23.672207 4876 scope.go:117] "RemoveContainer" containerID="835f793f77bf22dceb559b2a8ba4818d6e7bd2f69a3d8db1d6a7c4b55248c026" Dec 15 08:40:23 crc kubenswrapper[4876]: I1215 08:40:23.699988 4876 scope.go:117] "RemoveContainer" containerID="5b7aac28ee435bf7f826b75933e6f5c0a202e75a686db7bf9144479a5d8e24f6" Dec 15 08:40:23 crc kubenswrapper[4876]: I1215 08:40:23.743248 4876 scope.go:117] "RemoveContainer" containerID="0c097805664b721773bfd601f09489a4d6473be5895dc26c236072f8140b1f55" Dec 15 08:40:23 crc kubenswrapper[4876]: I1215 08:40:23.777189 4876 scope.go:117] "RemoveContainer" containerID="d194bd3f33c3f04cf41076d25fd958168aa5bd0e55f81fe56c86285d6c3d345e" Dec 15 08:40:23 crc kubenswrapper[4876]: I1215 08:40:23.806321 4876 scope.go:117] "RemoveContainer" containerID="0ed240638d9ecce3e64f4b2211456ac55febbf4f5b4ae68abe2e4bae192db206" Dec 15 08:40:33 crc kubenswrapper[4876]: I1215 08:40:33.706097 4876 
scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:40:33 crc kubenswrapper[4876]: E1215 08:40:33.707008 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:40:48 crc kubenswrapper[4876]: I1215 08:40:48.705038 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:40:48 crc kubenswrapper[4876]: E1215 08:40:48.705902 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:41:02 crc kubenswrapper[4876]: I1215 08:41:02.706288 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:41:02 crc kubenswrapper[4876]: E1215 08:41:02.706998 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:41:15 crc kubenswrapper[4876]: I1215 08:41:15.707621 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:41:15 crc kubenswrapper[4876]: E1215 08:41:15.708405 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.847735 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:24 crc kubenswrapper[4876]: E1215 08:41:24.848510 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="extract-utilities" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.848533 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="extract-utilities" Dec 15 08:41:24 crc kubenswrapper[4876]: E1215 08:41:24.848554 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="registry-server" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.848561 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" 
containerName="registry-server" Dec 15 08:41:24 crc kubenswrapper[4876]: E1215 08:41:24.848579 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="extract-content" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.848586 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="extract-content" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.848781 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e60be3-70c1-46b3-a7e1-3072c367dcc4" containerName="registry-server" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.850048 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:24 crc kubenswrapper[4876]: I1215 08:41:24.858331 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.020545 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.020625 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.020689 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq776\" (UniqueName: \"kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.122285 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.122392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.122436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq776\" (UniqueName: \"kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.122829 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.122845 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.142690 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq776\" (UniqueName: \"kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776\") pod \"redhat-marketplace-g4fpk\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.169624 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:25 crc kubenswrapper[4876]: I1215 08:41:25.676605 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:26 crc kubenswrapper[4876]: I1215 08:41:26.115292 4876 generic.go:334] "Generic (PLEG): container finished" podID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerID="72899f09769291221b8ee6964adf17f6597939d31c509a11722e7d19b797a276" exitCode=0 Dec 15 08:41:26 crc kubenswrapper[4876]: I1215 08:41:26.115397 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerDied","Data":"72899f09769291221b8ee6964adf17f6597939d31c509a11722e7d19b797a276"} Dec 15 08:41:26 crc kubenswrapper[4876]: I1215 08:41:26.115683 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerStarted","Data":"648e398f09c9359bcd8fe130bf0d300c28a315c709d5a42109275797596b0f25"} Dec 15 08:41:27 crc kubenswrapper[4876]: I1215 08:41:27.124644 4876 generic.go:334] "Generic (PLEG): container finished" podID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerID="387ce04ae0b8f7261e25023a161c1086e9452dec53132192e0d05e966f57d968" exitCode=0 Dec 15 08:41:27 crc kubenswrapper[4876]: I1215 08:41:27.124694 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerDied","Data":"387ce04ae0b8f7261e25023a161c1086e9452dec53132192e0d05e966f57d968"} Dec 15 08:41:28 crc kubenswrapper[4876]: I1215 08:41:28.135576 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerStarted","Data":"1aa78640eb59af7c2232202f3269e91f03938b2b1aa092ff1fc393733a168580"} Dec 15 08:41:28 crc kubenswrapper[4876]: I1215 08:41:28.163030 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g4fpk" podStartSLOduration=2.446661587 podStartE2EDuration="4.162878608s" podCreationTimestamp="2025-12-15 08:41:24 +0000 UTC" firstStartedPulling="2025-12-15 08:41:26.11669406 
+0000 UTC m=+6611.687836971" lastFinishedPulling="2025-12-15 08:41:27.832911081 +0000 UTC m=+6613.404053992" observedRunningTime="2025-12-15 08:41:28.159204499 +0000 UTC m=+6613.730347410" watchObservedRunningTime="2025-12-15 08:41:28.162878608 +0000 UTC m=+6613.734021539" Dec 15 08:41:29 crc kubenswrapper[4876]: I1215 08:41:29.705618 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:41:29 crc kubenswrapper[4876]: E1215 08:41:29.705900 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:41:35 crc kubenswrapper[4876]: I1215 08:41:35.169945 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:35 crc kubenswrapper[4876]: I1215 08:41:35.170635 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:35 crc kubenswrapper[4876]: I1215 08:41:35.219269 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:35 crc kubenswrapper[4876]: I1215 08:41:35.261614 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:35 crc kubenswrapper[4876]: I1215 08:41:35.469129 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:37 crc kubenswrapper[4876]: I1215 08:41:37.198240 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g4fpk" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="registry-server" containerID="cri-o://1aa78640eb59af7c2232202f3269e91f03938b2b1aa092ff1fc393733a168580" gracePeriod=2 Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.208002 4876 generic.go:334] "Generic (PLEG): container finished" podID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerID="1aa78640eb59af7c2232202f3269e91f03938b2b1aa092ff1fc393733a168580" exitCode=0 Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.208080 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerDied","Data":"1aa78640eb59af7c2232202f3269e91f03938b2b1aa092ff1fc393733a168580"} Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.208276 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g4fpk" event={"ID":"7087b701-d4c8-4313-ab89-0ba44d4ce7e4","Type":"ContainerDied","Data":"648e398f09c9359bcd8fe130bf0d300c28a315c709d5a42109275797596b0f25"} Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.208289 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="648e398f09c9359bcd8fe130bf0d300c28a315c709d5a42109275797596b0f25" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.209566 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.307200 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq776\" (UniqueName: \"kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776\") pod \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.307424 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities\") pod \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.307486 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content\") pod \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\" (UID: \"7087b701-d4c8-4313-ab89-0ba44d4ce7e4\") " Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.309073 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities" (OuterVolumeSpecName: "utilities") pod "7087b701-d4c8-4313-ab89-0ba44d4ce7e4" (UID: "7087b701-d4c8-4313-ab89-0ba44d4ce7e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.315375 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776" (OuterVolumeSpecName: "kube-api-access-mq776") pod "7087b701-d4c8-4313-ab89-0ba44d4ce7e4" (UID: "7087b701-d4c8-4313-ab89-0ba44d4ce7e4"). InnerVolumeSpecName "kube-api-access-mq776". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.330566 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7087b701-d4c8-4313-ab89-0ba44d4ce7e4" (UID: "7087b701-d4c8-4313-ab89-0ba44d4ce7e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.409391 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.409425 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq776\" (UniqueName: \"kubernetes.io/projected/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-kube-api-access-mq776\") on node \"crc\" DevicePath \"\"" Dec 15 08:41:38 crc kubenswrapper[4876]: I1215 08:41:38.409435 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7087b701-d4c8-4313-ab89-0ba44d4ce7e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:41:39 crc kubenswrapper[4876]: I1215 08:41:39.212919 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g4fpk" Dec 15 08:41:39 crc kubenswrapper[4876]: I1215 08:41:39.230762 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:39 crc kubenswrapper[4876]: I1215 08:41:39.238255 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g4fpk"] Dec 15 08:41:40 crc kubenswrapper[4876]: I1215 08:41:40.717588 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" path="/var/lib/kubelet/pods/7087b701-d4c8-4313-ab89-0ba44d4ce7e4/volumes" Dec 15 08:41:43 crc kubenswrapper[4876]: I1215 08:41:43.705344 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:41:43 crc kubenswrapper[4876]: E1215 08:41:43.705875 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:41:56 crc kubenswrapper[4876]: I1215 08:41:56.705958 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:41:56 crc kubenswrapper[4876]: E1215 08:41:56.706721 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:42:07 crc kubenswrapper[4876]: I1215 08:42:07.706384 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:42:08 crc kubenswrapper[4876]: I1215 08:42:08.414481 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd"} Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.736911 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:42:52 crc kubenswrapper[4876]: E1215 08:42:52.737787 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="extract-utilities" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.737801 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="extract-utilities" Dec 15 08:42:52 crc kubenswrapper[4876]: E1215 08:42:52.737819 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="registry-server" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.737827 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="registry-server" Dec 15 08:42:52 crc kubenswrapper[4876]: E1215 
08:42:52.737846 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="extract-content" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.737852 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="extract-content" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.737997 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7087b701-d4c8-4313-ab89-0ba44d4ce7e4" containerName="registry-server" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.741498 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.760802 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.860632 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.860699 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.860791 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgkkz\" (UniqueName: \"kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.962907 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.962973 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.963058 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgkkz\" (UniqueName: \"kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.963603 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.963741 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:52 crc kubenswrapper[4876]: I1215 08:42:52.985297 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgkkz\" (UniqueName: \"kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz\") pod \"certified-operators-97wgc\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.068861 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.541095 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.753745 4876 generic.go:334] "Generic (PLEG): container finished" podID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerID="1afbc3e61ac3dafeb839b472bc7b76012aa3687931210ed934bbca8fc87e7121" exitCode=0 Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.753819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerDied","Data":"1afbc3e61ac3dafeb839b472bc7b76012aa3687931210ed934bbca8fc87e7121"} Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.754148 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerStarted","Data":"cbedf92a694c613612e9e4fbf2c4b7790738ddccb081cb0fa757ff4dd8a122fa"} Dec 15 08:42:53 crc kubenswrapper[4876]: I1215 08:42:53.755737 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:42:54 crc kubenswrapper[4876]: I1215 08:42:54.777419 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerStarted","Data":"82e80832a1e4145929b8793d418c5358c29a45e8ae54caf71069a198906c0380"} Dec 15 08:42:55 crc kubenswrapper[4876]: I1215 08:42:55.788538 4876 generic.go:334] "Generic (PLEG): container finished" podID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerID="82e80832a1e4145929b8793d418c5358c29a45e8ae54caf71069a198906c0380" exitCode=0 Dec 15 08:42:55 crc kubenswrapper[4876]: I1215 08:42:55.788628 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerDied","Data":"82e80832a1e4145929b8793d418c5358c29a45e8ae54caf71069a198906c0380"} Dec 15 08:42:56 crc kubenswrapper[4876]: I1215 08:42:56.798333 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" 
event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerStarted","Data":"014b4507d5baf2ba2ba8f4b8420eb69d9c00d732299858787171ecffab17b206"} Dec 15 08:42:56 crc kubenswrapper[4876]: I1215 08:42:56.822092 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-97wgc" podStartSLOduration=2.364159596 podStartE2EDuration="4.822072978s" podCreationTimestamp="2025-12-15 08:42:52 +0000 UTC" firstStartedPulling="2025-12-15 08:42:53.755487809 +0000 UTC m=+6699.326630720" lastFinishedPulling="2025-12-15 08:42:56.213401191 +0000 UTC m=+6701.784544102" observedRunningTime="2025-12-15 08:42:56.816047177 +0000 UTC m=+6702.387190088" watchObservedRunningTime="2025-12-15 08:42:56.822072978 +0000 UTC m=+6702.393215909" Dec 15 08:43:03 crc kubenswrapper[4876]: I1215 08:43:03.068983 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:03 crc kubenswrapper[4876]: I1215 08:43:03.069550 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:03 crc kubenswrapper[4876]: I1215 08:43:03.107537 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:03 crc kubenswrapper[4876]: I1215 08:43:03.899051 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:03 crc kubenswrapper[4876]: I1215 08:43:03.941478 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:43:05 crc kubenswrapper[4876]: I1215 08:43:05.871956 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-97wgc" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="registry-server" containerID="cri-o://014b4507d5baf2ba2ba8f4b8420eb69d9c00d732299858787171ecffab17b206" gracePeriod=2 Dec 15 08:43:06 crc kubenswrapper[4876]: I1215 08:43:06.882596 4876 generic.go:334] "Generic (PLEG): container finished" podID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerID="014b4507d5baf2ba2ba8f4b8420eb69d9c00d732299858787171ecffab17b206" exitCode=0 Dec 15 08:43:06 crc kubenswrapper[4876]: I1215 08:43:06.882636 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerDied","Data":"014b4507d5baf2ba2ba8f4b8420eb69d9c00d732299858787171ecffab17b206"} Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.428915 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.573769 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities\") pod \"fef62073-07b5-4ea3-97d4-65e53717f74a\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.573851 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content\") pod \"fef62073-07b5-4ea3-97d4-65e53717f74a\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.573943 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgkkz\" (UniqueName: \"kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz\") pod \"fef62073-07b5-4ea3-97d4-65e53717f74a\" (UID: \"fef62073-07b5-4ea3-97d4-65e53717f74a\") " Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.574977 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities" (OuterVolumeSpecName: "utilities") pod "fef62073-07b5-4ea3-97d4-65e53717f74a" (UID: "fef62073-07b5-4ea3-97d4-65e53717f74a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.583594 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz" (OuterVolumeSpecName: "kube-api-access-xgkkz") pod "fef62073-07b5-4ea3-97d4-65e53717f74a" (UID: "fef62073-07b5-4ea3-97d4-65e53717f74a"). InnerVolumeSpecName "kube-api-access-xgkkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.638417 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fef62073-07b5-4ea3-97d4-65e53717f74a" (UID: "fef62073-07b5-4ea3-97d4-65e53717f74a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.675771 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgkkz\" (UniqueName: \"kubernetes.io/projected/fef62073-07b5-4ea3-97d4-65e53717f74a-kube-api-access-xgkkz\") on node \"crc\" DevicePath \"\"" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.675812 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.675823 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fef62073-07b5-4ea3-97d4-65e53717f74a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.893403 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97wgc" event={"ID":"fef62073-07b5-4ea3-97d4-65e53717f74a","Type":"ContainerDied","Data":"cbedf92a694c613612e9e4fbf2c4b7790738ddccb081cb0fa757ff4dd8a122fa"} Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.893474 4876 scope.go:117] "RemoveContainer" containerID="014b4507d5baf2ba2ba8f4b8420eb69d9c00d732299858787171ecffab17b206" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.893580 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-97wgc" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.924501 4876 scope.go:117] "RemoveContainer" containerID="82e80832a1e4145929b8793d418c5358c29a45e8ae54caf71069a198906c0380" Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.926870 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.934294 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-97wgc"] Dec 15 08:43:07 crc kubenswrapper[4876]: I1215 08:43:07.945934 4876 scope.go:117] "RemoveContainer" containerID="1afbc3e61ac3dafeb839b472bc7b76012aa3687931210ed934bbca8fc87e7121" Dec 15 08:43:08 crc kubenswrapper[4876]: I1215 08:43:08.716838 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" path="/var/lib/kubelet/pods/fef62073-07b5-4ea3-97d4-65e53717f74a/volumes" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.771216 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 08:43:56 crc kubenswrapper[4876]: E1215 08:43:56.772233 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="extract-content" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.772265 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="extract-content" Dec 15 08:43:56 crc kubenswrapper[4876]: E1215 08:43:56.772278 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="registry-server" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.772285 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="registry-server" Dec 15 08:43:56 crc kubenswrapper[4876]: E1215 08:43:56.772344 4876 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="extract-utilities" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.772352 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="extract-utilities" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.772510 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fef62073-07b5-4ea3-97d4-65e53717f74a" containerName="registry-server" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.773733 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.788418 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.914686 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.914757 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws7j6\" (UniqueName: \"kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:56 crc kubenswrapper[4876]: I1215 08:43:56.915094 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.016472 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.016583 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.016627 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws7j6\" (UniqueName: \"kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.017055 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities\") pod 
\"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.017071 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.034707 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws7j6\" (UniqueName: \"kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6\") pod \"redhat-operators-ppx4h\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.121489 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:43:57 crc kubenswrapper[4876]: I1215 08:43:57.561029 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 08:43:58 crc kubenswrapper[4876]: I1215 08:43:58.247703 4876 generic.go:334] "Generic (PLEG): container finished" podID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerID="3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38" exitCode=0 Dec 15 08:43:58 crc kubenswrapper[4876]: I1215 08:43:58.247749 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerDied","Data":"3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38"} Dec 15 08:43:58 crc kubenswrapper[4876]: I1215 08:43:58.247773 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerStarted","Data":"94dd1401e3e133cbf4d5147b0a068181181229ab05fd3087a7b077112a633fdf"} Dec 15 08:44:06 crc kubenswrapper[4876]: I1215 08:44:06.323687 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerStarted","Data":"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8"} Dec 15 08:44:07 crc kubenswrapper[4876]: I1215 08:44:07.333797 4876 generic.go:334] "Generic (PLEG): container finished" podID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerID="390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8" exitCode=0 Dec 15 08:44:07 crc kubenswrapper[4876]: I1215 08:44:07.333845 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerDied","Data":"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8"} Dec 15 08:44:08 crc kubenswrapper[4876]: I1215 08:44:08.341446 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerStarted","Data":"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522"} Dec 15 08:44:08 crc kubenswrapper[4876]: I1215 08:44:08.360944 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-ppx4h" podStartSLOduration=2.859711121 podStartE2EDuration="12.36092467s" podCreationTimestamp="2025-12-15 08:43:56 +0000 UTC" firstStartedPulling="2025-12-15 08:43:58.249645786 +0000 UTC m=+6763.820788697" lastFinishedPulling="2025-12-15 08:44:07.750859335 +0000 UTC m=+6773.322002246" observedRunningTime="2025-12-15 08:44:08.357370524 +0000 UTC m=+6773.928513445" watchObservedRunningTime="2025-12-15 08:44:08.36092467 +0000 UTC m=+6773.932067581" Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.122172 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.123130 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.180405 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.468811 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.531892 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.574439 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 08:44:17 crc kubenswrapper[4876]: I1215 08:44:17.574706 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-97lkp" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="registry-server" containerID="cri-o://884279527c9e8b114a12802ed2d99e3510320c89eb5db77946c0911f61106f46" gracePeriod=2 Dec 15 08:44:19 crc kubenswrapper[4876]: I1215 08:44:19.446688 4876 generic.go:334] "Generic (PLEG): container finished" podID="f59147b5-6786-4a73-8d94-eac80330370c" containerID="884279527c9e8b114a12802ed2d99e3510320c89eb5db77946c0911f61106f46" exitCode=0 Dec 15 08:44:19 crc kubenswrapper[4876]: I1215 08:44:19.446876 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerDied","Data":"884279527c9e8b114a12802ed2d99e3510320c89eb5db77946c0911f61106f46"} Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.173234 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.284789 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6qtj\" (UniqueName: \"kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj\") pod \"f59147b5-6786-4a73-8d94-eac80330370c\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.285187 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities\") pod \"f59147b5-6786-4a73-8d94-eac80330370c\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.285252 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content\") pod \"f59147b5-6786-4a73-8d94-eac80330370c\" (UID: \"f59147b5-6786-4a73-8d94-eac80330370c\") " Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.285754 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities" (OuterVolumeSpecName: "utilities") pod "f59147b5-6786-4a73-8d94-eac80330370c" (UID: "f59147b5-6786-4a73-8d94-eac80330370c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.292518 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj" (OuterVolumeSpecName: "kube-api-access-c6qtj") pod "f59147b5-6786-4a73-8d94-eac80330370c" (UID: "f59147b5-6786-4a73-8d94-eac80330370c"). InnerVolumeSpecName "kube-api-access-c6qtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.378642 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f59147b5-6786-4a73-8d94-eac80330370c" (UID: "f59147b5-6786-4a73-8d94-eac80330370c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.386747 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.386782 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f59147b5-6786-4a73-8d94-eac80330370c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.386795 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6qtj\" (UniqueName: \"kubernetes.io/projected/f59147b5-6786-4a73-8d94-eac80330370c-kube-api-access-c6qtj\") on node \"crc\" DevicePath \"\"" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.458301 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97lkp" event={"ID":"f59147b5-6786-4a73-8d94-eac80330370c","Type":"ContainerDied","Data":"5b86ddb76cf8b2515dbce8d2bf05aa36bc9bd512120c18ae01027c013f961557"} Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.458356 4876 scope.go:117] "RemoveContainer" containerID="884279527c9e8b114a12802ed2d99e3510320c89eb5db77946c0911f61106f46" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.458370 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97lkp" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.481951 4876 scope.go:117] "RemoveContainer" containerID="1253a34d7893ceace55417de6462076887572c0cd7ee62ab9bcbf5a64d50ca4d" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.502230 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.508077 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-97lkp"] Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.524612 4876 scope.go:117] "RemoveContainer" containerID="ecb3226ab81a598d0438c371570083df754a3814f84060567c3dd40c326028ce" Dec 15 08:44:20 crc kubenswrapper[4876]: I1215 08:44:20.717141 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f59147b5-6786-4a73-8d94-eac80330370c" path="/var/lib/kubelet/pods/f59147b5-6786-4a73-8d94-eac80330370c/volumes" Dec 15 08:44:27 crc kubenswrapper[4876]: I1215 08:44:27.323280 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:44:27 crc kubenswrapper[4876]: I1215 08:44:27.324567 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.980481 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 08:44:28 crc kubenswrapper[4876]: E1215 08:44:28.981149 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="extract-content" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.981164 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="extract-content" Dec 15 08:44:28 crc kubenswrapper[4876]: E1215 08:44:28.981181 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="registry-server" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.981188 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="registry-server" Dec 15 08:44:28 crc kubenswrapper[4876]: E1215 08:44:28.981204 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="extract-utilities" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.981211 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="extract-utilities" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.981372 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59147b5-6786-4a73-8d94-eac80330370c" containerName="registry-server" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.981973 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.986297 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qgf6m" Dec 15 08:44:28 crc kubenswrapper[4876]: I1215 08:44:28.989443 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.118554 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.118623 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72gtv\" (UniqueName: \"kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.219907 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.219996 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72gtv\" (UniqueName: \"kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.223039 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.223089 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ffd7446a9f9ba641614134dd92cda4d5d302d05265f70d848ff1946313e1c863/globalmount\"" pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.249565 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72gtv\" (UniqueName: \"kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.259079 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") pod \"mariadb-copy-data\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.302776 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 15 08:44:29 crc kubenswrapper[4876]: I1215 08:44:29.784863 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 08:44:30 crc kubenswrapper[4876]: I1215 08:44:30.531220 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7f902bcf-51d5-4b82-ac4c-2e30136731a4","Type":"ContainerStarted","Data":"b1e03640a51ce3ca410ef00197a627012e0f742073fc9b79073b47ea370762be"} Dec 15 08:44:30 crc kubenswrapper[4876]: I1215 08:44:30.531553 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7f902bcf-51d5-4b82-ac4c-2e30136731a4","Type":"ContainerStarted","Data":"0241653a80f1752166964bdaae3489e6a336b04fd2309382cdd6cbd38decdd9f"} Dec 15 08:44:30 crc kubenswrapper[4876]: I1215 08:44:30.544923 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.544901814 podStartE2EDuration="3.544901814s" podCreationTimestamp="2025-12-15 08:44:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:44:30.542374206 +0000 UTC m=+6796.113517137" watchObservedRunningTime="2025-12-15 08:44:30.544901814 +0000 UTC m=+6796.116044725" Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.701297 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.703179 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.708291 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.805998 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bndtq\" (UniqueName: \"kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq\") pod \"mariadb-client\" (UID: \"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9\") " pod="openstack/mariadb-client" Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.907679 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bndtq\" (UniqueName: \"kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq\") pod \"mariadb-client\" (UID: \"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9\") " pod="openstack/mariadb-client" Dec 15 08:44:33 crc kubenswrapper[4876]: I1215 08:44:33.926930 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bndtq\" (UniqueName: \"kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq\") pod \"mariadb-client\" (UID: \"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9\") " pod="openstack/mariadb-client" Dec 15 08:44:34 crc kubenswrapper[4876]: I1215 08:44:34.026003 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:34 crc kubenswrapper[4876]: I1215 08:44:34.434384 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:34 crc kubenswrapper[4876]: W1215 08:44:34.440218 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ff6ef01_5cac_4a94_a42a_4dad7fa820d9.slice/crio-5b6c3f29c4d634e773e05bcfe922b4047c74381b2b713105fe2afcbd0c09439f WatchSource:0}: Error finding container 5b6c3f29c4d634e773e05bcfe922b4047c74381b2b713105fe2afcbd0c09439f: Status 404 returned error can't find the container with id 5b6c3f29c4d634e773e05bcfe922b4047c74381b2b713105fe2afcbd0c09439f Dec 15 08:44:34 crc kubenswrapper[4876]: I1215 08:44:34.561608 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9","Type":"ContainerStarted","Data":"5b6c3f29c4d634e773e05bcfe922b4047c74381b2b713105fe2afcbd0c09439f"} Dec 15 08:44:35 crc kubenswrapper[4876]: I1215 08:44:35.569379 4876 generic.go:334] "Generic (PLEG): container finished" podID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" containerID="02e42033b0273d1d2c2f971460ccebdfefa517669fc38412e4ba355eaa1b9326" exitCode=0 Dec 15 08:44:35 crc kubenswrapper[4876]: I1215 08:44:35.569452 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9","Type":"ContainerDied","Data":"02e42033b0273d1d2c2f971460ccebdfefa517669fc38412e4ba355eaa1b9326"} Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.834354 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.856492 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_4ff6ef01-5cac-4a94-a42a-4dad7fa820d9/mariadb-client/0.log" Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.882740 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.893836 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.957928 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bndtq\" (UniqueName: \"kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq\") pod \"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9\" (UID: \"4ff6ef01-5cac-4a94-a42a-4dad7fa820d9\") " Dec 15 08:44:36 crc kubenswrapper[4876]: I1215 08:44:36.962763 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq" (OuterVolumeSpecName: "kube-api-access-bndtq") pod "4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" (UID: "4ff6ef01-5cac-4a94-a42a-4dad7fa820d9"). InnerVolumeSpecName "kube-api-access-bndtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.004248 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:37 crc kubenswrapper[4876]: E1215 08:44:37.004996 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" containerName="mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.005018 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" containerName="mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.005179 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" containerName="mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.005696 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.012702 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.060042 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bndtq\" (UniqueName: \"kubernetes.io/projected/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9-kube-api-access-bndtq\") on node \"crc\" DevicePath \"\"" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.161038 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv4m9\" (UniqueName: \"kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9\") pod \"mariadb-client\" (UID: \"14eefc04-d698-49a1-9df0-ecc66dae05fd\") " pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.262022 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv4m9\" (UniqueName: \"kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9\") pod \"mariadb-client\" (UID: \"14eefc04-d698-49a1-9df0-ecc66dae05fd\") " pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.288806 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv4m9\" (UniqueName: \"kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9\") pod \"mariadb-client\" (UID: \"14eefc04-d698-49a1-9df0-ecc66dae05fd\") " pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.327876 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.600943 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b6c3f29c4d634e773e05bcfe922b4047c74381b2b713105fe2afcbd0c09439f" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.600992 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.625768 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" podUID="14eefc04-d698-49a1-9df0-ecc66dae05fd" Dec 15 08:44:37 crc kubenswrapper[4876]: I1215 08:44:37.788461 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:37 crc kubenswrapper[4876]: W1215 08:44:37.801001 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14eefc04_d698_49a1_9df0_ecc66dae05fd.slice/crio-4f378ebfe44c2c3fd78c22d90673f6911f0f3c8b5cc306acecddabeff3710bb4 WatchSource:0}: Error finding container 4f378ebfe44c2c3fd78c22d90673f6911f0f3c8b5cc306acecddabeff3710bb4: Status 404 returned error can't find the container with id 4f378ebfe44c2c3fd78c22d90673f6911f0f3c8b5cc306acecddabeff3710bb4 Dec 15 08:44:38 crc kubenswrapper[4876]: I1215 08:44:38.609537 4876 generic.go:334] "Generic (PLEG): container finished" podID="14eefc04-d698-49a1-9df0-ecc66dae05fd" containerID="90439e07806b6c66d6c500f9a910142d2f236a9f91b0197d1ea346d0a0357e6b" exitCode=0 Dec 15 08:44:38 crc kubenswrapper[4876]: I1215 08:44:38.609593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"14eefc04-d698-49a1-9df0-ecc66dae05fd","Type":"ContainerDied","Data":"90439e07806b6c66d6c500f9a910142d2f236a9f91b0197d1ea346d0a0357e6b"} Dec 15 08:44:38 crc kubenswrapper[4876]: I1215 08:44:38.609853 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"14eefc04-d698-49a1-9df0-ecc66dae05fd","Type":"ContainerStarted","Data":"4f378ebfe44c2c3fd78c22d90673f6911f0f3c8b5cc306acecddabeff3710bb4"} Dec 15 08:44:38 crc kubenswrapper[4876]: I1215 08:44:38.714269 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ff6ef01-5cac-4a94-a42a-4dad7fa820d9" path="/var/lib/kubelet/pods/4ff6ef01-5cac-4a94-a42a-4dad7fa820d9/volumes" Dec 15 08:44:39 crc kubenswrapper[4876]: I1215 08:44:39.861386 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:39 crc kubenswrapper[4876]: I1215 08:44:39.878121 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_14eefc04-d698-49a1-9df0-ecc66dae05fd/mariadb-client/0.log" Dec 15 08:44:39 crc kubenswrapper[4876]: I1215 08:44:39.909058 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:39 crc kubenswrapper[4876]: I1215 08:44:39.915693 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.005017 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv4m9\" (UniqueName: \"kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9\") pod \"14eefc04-d698-49a1-9df0-ecc66dae05fd\" (UID: \"14eefc04-d698-49a1-9df0-ecc66dae05fd\") " Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.010326 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9" (OuterVolumeSpecName: "kube-api-access-bv4m9") pod "14eefc04-d698-49a1-9df0-ecc66dae05fd" (UID: "14eefc04-d698-49a1-9df0-ecc66dae05fd"). 
InnerVolumeSpecName "kube-api-access-bv4m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.106744 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv4m9\" (UniqueName: \"kubernetes.io/projected/14eefc04-d698-49a1-9df0-ecc66dae05fd-kube-api-access-bv4m9\") on node \"crc\" DevicePath \"\"" Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.632113 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f378ebfe44c2c3fd78c22d90673f6911f0f3c8b5cc306acecddabeff3710bb4" Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.632508 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Dec 15 08:44:40 crc kubenswrapper[4876]: I1215 08:44:40.714539 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14eefc04-d698-49a1-9df0-ecc66dae05fd" path="/var/lib/kubelet/pods/14eefc04-d698-49a1-9df0-ecc66dae05fd/volumes" Dec 15 08:44:57 crc kubenswrapper[4876]: I1215 08:44:57.322764 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:44:57 crc kubenswrapper[4876]: I1215 08:44:57.323376 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.155909 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms"] Dec 15 08:45:00 crc kubenswrapper[4876]: E1215 08:45:00.156733 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14eefc04-d698-49a1-9df0-ecc66dae05fd" containerName="mariadb-client" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.156753 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="14eefc04-d698-49a1-9df0-ecc66dae05fd" containerName="mariadb-client" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.156936 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="14eefc04-d698-49a1-9df0-ecc66dae05fd" containerName="mariadb-client" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.157625 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.160063 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.160090 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.164242 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms"] Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.311486 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cbdm\" (UniqueName: \"kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.311549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.311574 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.413158 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cbdm\" (UniqueName: \"kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.413216 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.413247 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.414306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume\") pod 
\"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.424053 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.431953 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cbdm\" (UniqueName: \"kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm\") pod \"collect-profiles-29429805-rqgms\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.481192 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:00 crc kubenswrapper[4876]: I1215 08:45:00.875607 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms"] Dec 15 08:45:01 crc kubenswrapper[4876]: I1215 08:45:01.785884 4876 generic.go:334] "Generic (PLEG): container finished" podID="098b7466-0828-4b42-855c-9e4095061276" containerID="6d2ce8d6a0d6d5c60dc97123a54f65af108248163b966c31d4a1a06febea33a2" exitCode=0 Dec 15 08:45:01 crc kubenswrapper[4876]: I1215 08:45:01.786184 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" event={"ID":"098b7466-0828-4b42-855c-9e4095061276","Type":"ContainerDied","Data":"6d2ce8d6a0d6d5c60dc97123a54f65af108248163b966c31d4a1a06febea33a2"} Dec 15 08:45:01 crc kubenswrapper[4876]: I1215 08:45:01.786255 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" event={"ID":"098b7466-0828-4b42-855c-9e4095061276","Type":"ContainerStarted","Data":"11f797aa0fb6ec9ebcf70b782de1afdecd5ec0100548505682e30e241f7617ff"} Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.059736 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.176335 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cbdm\" (UniqueName: \"kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm\") pod \"098b7466-0828-4b42-855c-9e4095061276\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.176454 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume\") pod \"098b7466-0828-4b42-855c-9e4095061276\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.176544 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume\") pod \"098b7466-0828-4b42-855c-9e4095061276\" (UID: \"098b7466-0828-4b42-855c-9e4095061276\") " Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.177095 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume" (OuterVolumeSpecName: "config-volume") pod "098b7466-0828-4b42-855c-9e4095061276" (UID: "098b7466-0828-4b42-855c-9e4095061276"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.181410 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm" (OuterVolumeSpecName: "kube-api-access-5cbdm") pod "098b7466-0828-4b42-855c-9e4095061276" (UID: "098b7466-0828-4b42-855c-9e4095061276"). InnerVolumeSpecName "kube-api-access-5cbdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.182264 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "098b7466-0828-4b42-855c-9e4095061276" (UID: "098b7466-0828-4b42-855c-9e4095061276"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.278501 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cbdm\" (UniqueName: \"kubernetes.io/projected/098b7466-0828-4b42-855c-9e4095061276-kube-api-access-5cbdm\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.278550 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/098b7466-0828-4b42-855c-9e4095061276-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.278564 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/098b7466-0828-4b42-855c-9e4095061276-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.801036 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" event={"ID":"098b7466-0828-4b42-855c-9e4095061276","Type":"ContainerDied","Data":"11f797aa0fb6ec9ebcf70b782de1afdecd5ec0100548505682e30e241f7617ff"} Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.801074 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11f797aa0fb6ec9ebcf70b782de1afdecd5ec0100548505682e30e241f7617ff" Dec 15 08:45:03 crc kubenswrapper[4876]: I1215 08:45:03.801143 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms" Dec 15 08:45:04 crc kubenswrapper[4876]: I1215 08:45:04.136362 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg"] Dec 15 08:45:04 crc kubenswrapper[4876]: I1215 08:45:04.148277 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429760-22glg"] Dec 15 08:45:04 crc kubenswrapper[4876]: I1215 08:45:04.715948 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff39bd75-4416-44bc-b207-29c57dbb3975" path="/var/lib/kubelet/pods/ff39bd75-4416-44bc-b207-29c57dbb3975/volumes" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.575661 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 08:45:06 crc kubenswrapper[4876]: E1215 08:45:06.576323 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="098b7466-0828-4b42-855c-9e4095061276" containerName="collect-profiles" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.576340 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="098b7466-0828-4b42-855c-9e4095061276" containerName="collect-profiles" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.576543 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="098b7466-0828-4b42-855c-9e4095061276" containerName="collect-profiles" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.577469 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.579444 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.580441 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-rh55h" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.580475 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.597041 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.598677 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.610344 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.618153 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.619549 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.628438 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.642506 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727755 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727810 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d76c52a3-b9f7-4807-9642-fa62750a87f3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727858 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv2z6\" (UniqueName: \"kubernetes.io/projected/b6826c99-fe6e-4280-ae05-f5f6794d2f74-kube-api-access-pv2z6\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727888 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727915 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/cf9e91ec-2a1f-450f-8736-9d5f50335754-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727932 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9e91ec-2a1f-450f-8736-9d5f50335754-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727950 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6826c99-fe6e-4280-ae05-f5f6794d2f74-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.727976 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-config\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728003 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46cnl\" (UniqueName: \"kubernetes.io/projected/d76c52a3-b9f7-4807-9642-fa62750a87f3-kube-api-access-46cnl\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728026 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6826c99-fe6e-4280-ae05-f5f6794d2f74-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728043 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-config\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728076 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728168 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75r2z\" (UniqueName: \"kubernetes.io/projected/cf9e91ec-2a1f-450f-8736-9d5f50335754-kube-api-access-75r2z\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728221 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728242 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728261 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728302 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.728324 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76c52a3-b9f7-4807-9642-fa62750a87f3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831520 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75r2z\" (UniqueName: \"kubernetes.io/projected/cf9e91ec-2a1f-450f-8736-9d5f50335754-kube-api-access-75r2z\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831602 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831639 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831659 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831728 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76c52a3-b9f7-4807-9642-fa62750a87f3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " 
pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831746 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831855 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831882 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d76c52a3-b9f7-4807-9642-fa62750a87f3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831964 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv2z6\" (UniqueName: \"kubernetes.io/projected/b6826c99-fe6e-4280-ae05-f5f6794d2f74-kube-api-access-pv2z6\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.831989 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832052 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9e91ec-2a1f-450f-8736-9d5f50335754-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832070 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cf9e91ec-2a1f-450f-8736-9d5f50335754-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832089 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6826c99-fe6e-4280-ae05-f5f6794d2f74-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832129 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-config\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46cnl\" 
(UniqueName: \"kubernetes.io/projected/d76c52a3-b9f7-4807-9642-fa62750a87f3-kube-api-access-46cnl\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832189 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6826c99-fe6e-4280-ae05-f5f6794d2f74-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-config\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.832259 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.833384 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.833801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b6826c99-fe6e-4280-ae05-f5f6794d2f74-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.833980 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d76c52a3-b9f7-4807-9642-fa62750a87f3-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.834227 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.834359 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-config\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.834570 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cf9e91ec-2a1f-450f-8736-9d5f50335754-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.835161 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d76c52a3-b9f7-4807-9642-fa62750a87f3-config\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: 
I1215 08:45:06.835286 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-config\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.836590 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cf9e91ec-2a1f-450f-8736-9d5f50335754-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.837441 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.837476 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cdb36d4de0b7c7b477ba44d58fe5bef95886426e8af89057d8b106988c12d42a/globalmount\"" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.837519 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b6826c99-fe6e-4280-ae05-f5f6794d2f74-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.840671 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.840717 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d2ca49de56fb2a9ed620a33f66c17ac3b880da44e301ee7a52785775c683dff2/globalmount\"" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.841918 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.842778 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.843950 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.844481 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-hsgfb" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.844514 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.846248 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.847032 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf9e91ec-2a1f-450f-8736-9d5f50335754-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.851645 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.851698 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/73744ece3681e4378d2fcfd1d3d9980397627f58f04d8dc5bb61774a11064318/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.860041 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6826c99-fe6e-4280-ae05-f5f6794d2f74-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.861638 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75r2z\" (UniqueName: \"kubernetes.io/projected/cf9e91ec-2a1f-450f-8736-9d5f50335754-kube-api-access-75r2z\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.865632 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv2z6\" (UniqueName: \"kubernetes.io/projected/b6826c99-fe6e-4280-ae05-f5f6794d2f74-kube-api-access-pv2z6\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.872193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76c52a3-b9f7-4807-9642-fa62750a87f3-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.873634 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46cnl\" (UniqueName: \"kubernetes.io/projected/d76c52a3-b9f7-4807-9642-fa62750a87f3-kube-api-access-46cnl\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " 
pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.876697 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.885146 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.887386 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.892221 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.900663 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.919037 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ded8e887-a7fc-4b8c-a51d-83039f35215b\") pod \"ovsdbserver-nb-1\" (UID: \"d76c52a3-b9f7-4807-9642-fa62750a87f3\") " pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.919368 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-34dfd1ae-2b04-431d-bd52-7947f0a3e6b1\") pod \"ovsdbserver-nb-2\" (UID: \"cf9e91ec-2a1f-450f-8736-9d5f50335754\") " pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.931824 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.933210 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-97677e72-8c2c-4285-839c-fdc5cc00d04b\") pod \"ovsdbserver-nb-0\" (UID: \"b6826c99-fe6e-4280-ae05-f5f6794d2f74\") " pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934710 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/afb0565c-3624-4f18-bc08-157348daa031-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934745 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02a6599-0085-4e35-8bad-f9512ef7ef42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934790 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934807 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" 
(UniqueName: \"kubernetes.io/empty-dir/c02a6599-0085-4e35-8bad-f9512ef7ef42-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934828 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934868 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2nv6\" (UniqueName: \"kubernetes.io/projected/b0df166e-bbcc-4954-88a7-73ecc378bcfa-kube-api-access-g2nv6\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934945 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-config\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934972 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0df166e-bbcc-4954-88a7-73ecc378bcfa-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.934997 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0df166e-bbcc-4954-88a7-73ecc378bcfa-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935090 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935154 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgkwz\" (UniqueName: \"kubernetes.io/projected/c02a6599-0085-4e35-8bad-f9512ef7ef42-kube-api-access-cgkwz\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-config\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935223 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935252 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnsq5\" (UniqueName: \"kubernetes.io/projected/afb0565c-3624-4f18-bc08-157348daa031-kube-api-access-rnsq5\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935277 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935316 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-config\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935405 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.935435 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb0565c-3624-4f18-bc08-157348daa031-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:06 crc kubenswrapper[4876]: I1215 08:45:06.939518 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037568 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2nv6\" (UniqueName: \"kubernetes.io/projected/b0df166e-bbcc-4954-88a7-73ecc378bcfa-kube-api-access-g2nv6\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037649 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-config\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037728 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0df166e-bbcc-4954-88a7-73ecc378bcfa-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037751 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0df166e-bbcc-4954-88a7-73ecc378bcfa-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037799 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037826 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgkwz\" (UniqueName: \"kubernetes.io/projected/c02a6599-0085-4e35-8bad-f9512ef7ef42-kube-api-access-cgkwz\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037851 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-config\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037892 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037918 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnsq5\" (UniqueName: \"kubernetes.io/projected/afb0565c-3624-4f18-bc08-157348daa031-kube-api-access-rnsq5\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037960 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.037990 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-config\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038041 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038062 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb0565c-3624-4f18-bc08-157348daa031-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038092 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/afb0565c-3624-4f18-bc08-157348daa031-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038145 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02a6599-0085-4e35-8bad-f9512ef7ef42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038180 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038212 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c02a6599-0085-4e35-8bad-f9512ef7ef42-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.038253 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.039889 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-config\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc 
kubenswrapper[4876]: I1215 08:45:07.040374 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c02a6599-0085-4e35-8bad-f9512ef7ef42-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.040808 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.041821 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afb0565c-3624-4f18-bc08-157348daa031-config\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.041849 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c02a6599-0085-4e35-8bad-f9512ef7ef42-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.042150 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/afb0565c-3624-4f18-bc08-157348daa031-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.045586 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.045590 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-config\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.045622 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fbb6a6de6a1551a24087140c00b049d7f615dfc0c7fd02b8a8d81775c7a489e7/globalmount\"" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.046536 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.046563 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b40485d952d300a2dd59ea03730f1b38cfc6db64a03c5bc74a0d11750839b2d0/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.047390 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b0df166e-bbcc-4954-88a7-73ecc378bcfa-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.047409 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b0df166e-bbcc-4954-88a7-73ecc378bcfa-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.050315 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afb0565c-3624-4f18-bc08-157348daa031-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.050739 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02a6599-0085-4e35-8bad-f9512ef7ef42-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.052120 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0df166e-bbcc-4954-88a7-73ecc378bcfa-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.060569 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.060602 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/da9773c2246ef49ddcfe5241977ab325a75eedcd7f6dc0c78abc4c00089ad396/globalmount\"" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.065453 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgkwz\" (UniqueName: \"kubernetes.io/projected/c02a6599-0085-4e35-8bad-f9512ef7ef42-kube-api-access-cgkwz\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.067803 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnsq5\" (UniqueName: \"kubernetes.io/projected/afb0565c-3624-4f18-bc08-157348daa031-kube-api-access-rnsq5\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.074187 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2nv6\" (UniqueName: \"kubernetes.io/projected/b0df166e-bbcc-4954-88a7-73ecc378bcfa-kube-api-access-g2nv6\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.098279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c11fcfc3-5451-4de6-bbb8-892360aa860f\") pod \"ovsdbserver-sb-2\" (UID: \"b0df166e-bbcc-4954-88a7-73ecc378bcfa\") " pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.099633 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-50ae8e91-f7d3-41cd-8b92-96a968da9c7e\") pod \"ovsdbserver-sb-1\" (UID: \"afb0565c-3624-4f18-bc08-157348daa031\") " pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.117355 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae421321-3ffb-4a4a-9039-39b6da0c79d5\") pod \"ovsdbserver-sb-0\" (UID: \"c02a6599-0085-4e35-8bad-f9512ef7ef42\") " pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.200156 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.353523 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.365649 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.380765 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.456839 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.686604 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 15 08:45:07 crc kubenswrapper[4876]: W1215 08:45:07.702766 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6826c99_fe6e_4280_ae05_f5f6794d2f74.slice/crio-fe76a82eec3e1e1db344a12fc09aa0d9d9b53243c18115bada7ba31cd3e75328 WatchSource:0}: Error finding container fe76a82eec3e1e1db344a12fc09aa0d9d9b53243c18115bada7ba31cd3e75328: Status 404 returned error can't find the container with id fe76a82eec3e1e1db344a12fc09aa0d9d9b53243c18115bada7ba31cd3e75328 Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.850121 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6826c99-fe6e-4280-ae05-f5f6794d2f74","Type":"ContainerStarted","Data":"fe76a82eec3e1e1db344a12fc09aa0d9d9b53243c18115bada7ba31cd3e75328"} Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.851811 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"cf9e91ec-2a1f-450f-8736-9d5f50335754","Type":"ContainerStarted","Data":"d3372aad45cd238d0463c636e8236c3600eaddde9221635c592d8786c2572fd3"} Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.894524 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Dec 15 08:45:07 crc kubenswrapper[4876]: I1215 08:45:07.979694 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Dec 15 08:45:07 crc kubenswrapper[4876]: W1215 08:45:07.989045 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafb0565c_3624_4f18_bc08_157348daa031.slice/crio-836905c842c2b6d9c11068f118051d9f27a84d9942faaef647c1d3207b3e6f77 WatchSource:0}: Error finding container 836905c842c2b6d9c11068f118051d9f27a84d9942faaef647c1d3207b3e6f77: Status 404 returned error can't find the container with id 836905c842c2b6d9c11068f118051d9f27a84d9942faaef647c1d3207b3e6f77 Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.428464 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 15 08:45:08 crc kubenswrapper[4876]: W1215 08:45:08.444853 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc02a6599_0085_4e35_8bad_f9512ef7ef42.slice/crio-041c88782a4b1e8aadb8a3098640dc881396dcc4be430db4adeb3443aae4f02d WatchSource:0}: Error finding container 041c88782a4b1e8aadb8a3098640dc881396dcc4be430db4adeb3443aae4f02d: Status 404 returned error can't find the container with id 041c88782a4b1e8aadb8a3098640dc881396dcc4be430db4adeb3443aae4f02d Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.519680 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Dec 15 08:45:08 crc kubenswrapper[4876]: W1215 08:45:08.550202 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd76c52a3_b9f7_4807_9642_fa62750a87f3.slice/crio-ab50c324ff3a51ed04ff696176e001894c36a0634c5f80c48d32faec89300587 WatchSource:0}: Error finding container 
ab50c324ff3a51ed04ff696176e001894c36a0634c5f80c48d32faec89300587: Status 404 returned error can't find the container with id ab50c324ff3a51ed04ff696176e001894c36a0634c5f80c48d32faec89300587 Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.860898 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"b0df166e-bbcc-4954-88a7-73ecc378bcfa","Type":"ContainerStarted","Data":"aeaa35f7b2cea8dc3dbe991c4554018d325122c4e39b4f8db5dad58066a90b9f"} Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.862001 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d76c52a3-b9f7-4807-9642-fa62750a87f3","Type":"ContainerStarted","Data":"ab50c324ff3a51ed04ff696176e001894c36a0634c5f80c48d32faec89300587"} Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.863117 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c02a6599-0085-4e35-8bad-f9512ef7ef42","Type":"ContainerStarted","Data":"041c88782a4b1e8aadb8a3098640dc881396dcc4be430db4adeb3443aae4f02d"} Dec 15 08:45:08 crc kubenswrapper[4876]: I1215 08:45:08.864187 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"afb0565c-3624-4f18-bc08-157348daa031","Type":"ContainerStarted","Data":"836905c842c2b6d9c11068f118051d9f27a84d9942faaef647c1d3207b3e6f77"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.902870 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d76c52a3-b9f7-4807-9642-fa62750a87f3","Type":"ContainerStarted","Data":"468fce05f27ac55e7d5f29fab45da2a8d5e9d831b3ddb4ecedf6db2ade419cd1"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.903578 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"d76c52a3-b9f7-4807-9642-fa62750a87f3","Type":"ContainerStarted","Data":"63c38aaab5ecfe32847d0effbf54640858bce515d5f86b891070104a88fa12fd"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.906883 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6826c99-fe6e-4280-ae05-f5f6794d2f74","Type":"ContainerStarted","Data":"d0a1873a86e0d91dc74603f9f7cd82da6fdee3ddb36ae01314c35d47d46c533f"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.906922 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b6826c99-fe6e-4280-ae05-f5f6794d2f74","Type":"ContainerStarted","Data":"640172a9e69413ba085c3a456d82ee866504757d428e4907e56ff4bc69cf17c4"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.908989 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c02a6599-0085-4e35-8bad-f9512ef7ef42","Type":"ContainerStarted","Data":"55cd77a12386b4db2881d4d7565182cff3a742f02ca0ee39be295ff3bc9d2e34"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.909026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c02a6599-0085-4e35-8bad-f9512ef7ef42","Type":"ContainerStarted","Data":"0686797d33ebc197537b6e806cfcd07641e1cad3a746796bfd2f4630beb53654"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.910544 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"afb0565c-3624-4f18-bc08-157348daa031","Type":"ContainerStarted","Data":"dc1b6b21c997f79718563ac6c63f5e71dac3166db3c6b51b73667f05dcb3fe02"} Dec 15 08:45:12 crc 
kubenswrapper[4876]: I1215 08:45:12.910599 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"afb0565c-3624-4f18-bc08-157348daa031","Type":"ContainerStarted","Data":"81f5eea2d5120113dc06dcc275601f5cb0e0c6745e6e6b8db613e00ddebe6b48"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.912128 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"b0df166e-bbcc-4954-88a7-73ecc378bcfa","Type":"ContainerStarted","Data":"c2ea9a3afce700560bebac4ae59b3609d373e2de134476fa7db8c2b61de3704e"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.912156 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"b0df166e-bbcc-4954-88a7-73ecc378bcfa","Type":"ContainerStarted","Data":"569bb1275dfb9b051524d2a661ae060229a5bd3dd79abe7b285bdb51d274b3cf"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.913621 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"cf9e91ec-2a1f-450f-8736-9d5f50335754","Type":"ContainerStarted","Data":"284c1507f5d1af514008c99631e7d93136232c5d5d0ecb9d8d1712a840e97a86"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.913646 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"cf9e91ec-2a1f-450f-8736-9d5f50335754","Type":"ContainerStarted","Data":"48cdfb40b98d021b0efaf415b1c75bc0c23f224057b5e7d9c712b8267cbe93ee"} Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.926828 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.427174238 podStartE2EDuration="7.926811005s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:08.552482282 +0000 UTC m=+6834.123625193" lastFinishedPulling="2025-12-15 08:45:12.052119049 +0000 UTC m=+6837.623261960" observedRunningTime="2025-12-15 08:45:12.921131713 +0000 UTC m=+6838.492274654" watchObservedRunningTime="2025-12-15 08:45:12.926811005 +0000 UTC m=+6838.497953916" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.931892 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.943335 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.943374 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.61108349 podStartE2EDuration="7.94335218s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:08.451051276 +0000 UTC m=+6834.022194187" lastFinishedPulling="2025-12-15 08:45:11.783319966 +0000 UTC m=+6837.354462877" observedRunningTime="2025-12-15 08:45:12.940573085 +0000 UTC m=+6838.511715996" watchObservedRunningTime="2025-12-15 08:45:12.94335218 +0000 UTC m=+6838.514495081" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.960471 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.895899831 podStartE2EDuration="7.96045461s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:07.704678008 +0000 UTC m=+6833.275820919" lastFinishedPulling="2025-12-15 08:45:11.769232787 +0000 UTC m=+6837.340375698" observedRunningTime="2025-12-15 
08:45:12.957314795 +0000 UTC m=+6838.528457716" watchObservedRunningTime="2025-12-15 08:45:12.96045461 +0000 UTC m=+6838.531597541" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.973437 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.122026698 podStartE2EDuration="7.973418388s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:07.915576966 +0000 UTC m=+6833.486719877" lastFinishedPulling="2025-12-15 08:45:11.766968656 +0000 UTC m=+6837.338111567" observedRunningTime="2025-12-15 08:45:12.971724432 +0000 UTC m=+6838.542867353" watchObservedRunningTime="2025-12-15 08:45:12.973418388 +0000 UTC m=+6838.544561299" Dec 15 08:45:12 crc kubenswrapper[4876]: I1215 08:45:12.990663 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.692930225 podStartE2EDuration="7.99064515s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:07.470541926 +0000 UTC m=+6833.041684827" lastFinishedPulling="2025-12-15 08:45:11.768256841 +0000 UTC m=+6837.339399752" observedRunningTime="2025-12-15 08:45:12.987682781 +0000 UTC m=+6838.558825702" watchObservedRunningTime="2025-12-15 08:45:12.99064515 +0000 UTC m=+6838.561788071" Dec 15 08:45:13 crc kubenswrapper[4876]: I1215 08:45:13.008975 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.234423346 podStartE2EDuration="8.008956992s" podCreationTimestamp="2025-12-15 08:45:05 +0000 UTC" firstStartedPulling="2025-12-15 08:45:07.991544237 +0000 UTC m=+6833.562687148" lastFinishedPulling="2025-12-15 08:45:11.766077883 +0000 UTC m=+6837.337220794" observedRunningTime="2025-12-15 08:45:13.003085295 +0000 UTC m=+6838.574228216" watchObservedRunningTime="2025-12-15 08:45:13.008956992 +0000 UTC m=+6838.580099903" Dec 15 08:45:13 crc kubenswrapper[4876]: I1215 08:45:13.200915 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:13 crc kubenswrapper[4876]: I1215 08:45:13.355339 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:13 crc kubenswrapper[4876]: I1215 08:45:13.366797 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:13 crc kubenswrapper[4876]: I1215 08:45:13.381189 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:15 crc kubenswrapper[4876]: I1215 08:45:15.976752 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:15 crc kubenswrapper[4876]: I1215 08:45:15.977291 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:15 crc kubenswrapper[4876]: I1215 08:45:15.980044 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:15 crc kubenswrapper[4876]: I1215 08:45:15.980292 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.239426 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 
08:45:16.239975 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.395657 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.396196 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.405083 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.406058 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.427815 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:16 crc kubenswrapper[4876]: I1215 08:45:16.428386 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.241134 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.398475 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.420202 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.452413 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.523459 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.524777 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.528752 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.549696 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.621442 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.621511 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6l22\" (UniqueName: \"kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.621571 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.621641 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.723013 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.723117 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.723162 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6l22\" (UniqueName: \"kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.723214 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" 
Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.724224 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.724847 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.725494 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.755033 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6l22\" (UniqueName: \"kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22\") pod \"dnsmasq-dns-6d5f5789d7-cc8fj\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.778809 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.783149 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.802761 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.804699 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.808275 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.811070 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.926161 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.926214 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.926312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.926363 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:17 crc kubenswrapper[4876]: I1215 08:45:17.926450 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddzrb\" (UniqueName: \"kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.027923 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddzrb\" (UniqueName: \"kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.028073 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.028115 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " 
pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.028221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.028257 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.031726 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.032524 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.032626 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.032735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.047529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddzrb\" (UniqueName: \"kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb\") pod \"dnsmasq-dns-65575b46ff-vz4zr\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.196003 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.247078 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.673371 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.964616 4876 generic.go:334] "Generic (PLEG): container finished" podID="3321f28b-aa33-42a6-a8be-37aa6a6a257a" containerID="7f29cf9c47f82a9a810f14424b9ad3503717af8b1065f173ad35b67c69660b34" exitCode=0 Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.964684 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" event={"ID":"3321f28b-aa33-42a6-a8be-37aa6a6a257a","Type":"ContainerDied","Data":"7f29cf9c47f82a9a810f14424b9ad3503717af8b1065f173ad35b67c69660b34"} Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.965188 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" event={"ID":"3321f28b-aa33-42a6-a8be-37aa6a6a257a","Type":"ContainerStarted","Data":"f0559f0e340fbed51eab8895cf7f2768ef7f4cd3bb229935de6faf22f7e043d2"} Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.967719 4876 generic.go:334] "Generic (PLEG): container finished" podID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerID="d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52" exitCode=0 Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.967759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" event={"ID":"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd","Type":"ContainerDied","Data":"d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52"} Dec 15 08:45:18 crc kubenswrapper[4876]: I1215 08:45:18.967784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" event={"ID":"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd","Type":"ContainerStarted","Data":"ebfe68d42d018076ca0efa9511654ad0efce2eb1198dc830b718bd31ba8e0e7a"} Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.270301 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.351461 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6l22\" (UniqueName: \"kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22\") pod \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.351549 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb\") pod \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.351582 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc\") pod \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.351662 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config\") pod \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\" (UID: \"3321f28b-aa33-42a6-a8be-37aa6a6a257a\") " Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.358393 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22" (OuterVolumeSpecName: "kube-api-access-v6l22") pod "3321f28b-aa33-42a6-a8be-37aa6a6a257a" (UID: "3321f28b-aa33-42a6-a8be-37aa6a6a257a"). InnerVolumeSpecName "kube-api-access-v6l22". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.373230 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config" (OuterVolumeSpecName: "config") pod "3321f28b-aa33-42a6-a8be-37aa6a6a257a" (UID: "3321f28b-aa33-42a6-a8be-37aa6a6a257a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.374555 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3321f28b-aa33-42a6-a8be-37aa6a6a257a" (UID: "3321f28b-aa33-42a6-a8be-37aa6a6a257a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.376431 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3321f28b-aa33-42a6-a8be-37aa6a6a257a" (UID: "3321f28b-aa33-42a6-a8be-37aa6a6a257a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.454317 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.454358 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6l22\" (UniqueName: \"kubernetes.io/projected/3321f28b-aa33-42a6-a8be-37aa6a6a257a-kube-api-access-v6l22\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.454371 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.454380 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3321f28b-aa33-42a6-a8be-37aa6a6a257a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.977227 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.977248 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5f5789d7-cc8fj" event={"ID":"3321f28b-aa33-42a6-a8be-37aa6a6a257a","Type":"ContainerDied","Data":"f0559f0e340fbed51eab8895cf7f2768ef7f4cd3bb229935de6faf22f7e043d2"} Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.977617 4876 scope.go:117] "RemoveContainer" containerID="7f29cf9c47f82a9a810f14424b9ad3503717af8b1065f173ad35b67c69660b34" Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.981174 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" event={"ID":"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd","Type":"ContainerStarted","Data":"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e"} Dec 15 08:45:19 crc kubenswrapper[4876]: I1215 08:45:19.981303 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:20 crc kubenswrapper[4876]: I1215 08:45:20.045829 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" podStartSLOduration=3.045771717 podStartE2EDuration="3.045771717s" podCreationTimestamp="2025-12-15 08:45:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:45:20.003544512 +0000 UTC m=+6845.574687423" watchObservedRunningTime="2025-12-15 08:45:20.045771717 +0000 UTC m=+6845.616914638" Dec 15 08:45:20 crc kubenswrapper[4876]: I1215 08:45:20.059040 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:20 crc kubenswrapper[4876]: I1215 08:45:20.066321 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5f5789d7-cc8fj"] Dec 15 08:45:20 crc kubenswrapper[4876]: I1215 08:45:20.715919 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3321f28b-aa33-42a6-a8be-37aa6a6a257a" path="/var/lib/kubelet/pods/3321f28b-aa33-42a6-a8be-37aa6a6a257a/volumes" Dec 15 08:45:21 crc kubenswrapper[4876]: I1215 08:45:21.975606 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/ovsdbserver-nb-2" Dec 15 08:45:21 crc kubenswrapper[4876]: I1215 08:45:21.981505 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Dec 15 08:45:23 crc kubenswrapper[4876]: I1215 08:45:23.980179 4876 scope.go:117] "RemoveContainer" containerID="8c5d97fe05cc1d994041421a8ea06f64102d045b63c6bddb963e21afb7c16790" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.779060 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Dec 15 08:45:24 crc kubenswrapper[4876]: E1215 08:45:24.779924 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3321f28b-aa33-42a6-a8be-37aa6a6a257a" containerName="init" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.780067 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3321f28b-aa33-42a6-a8be-37aa6a6a257a" containerName="init" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.780510 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3321f28b-aa33-42a6-a8be-37aa6a6a257a" containerName="init" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.781547 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.784621 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.786329 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.938016 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.938120 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc95f\" (UniqueName: \"kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:24 crc kubenswrapper[4876]: I1215 08:45:24.938190 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.048966 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.049126 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc95f\" (UniqueName: \"kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 
08:45:25.049168 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.054735 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.054867 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/42dd1d3ee0aff3762f180f443aa8ffb32784795c9c38ae035e856a3bb6f19268/globalmount\"" pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.056632 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.066833 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc95f\" (UniqueName: \"kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.082520 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") pod \"ovn-copy-data\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.110457 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 15 08:45:25 crc kubenswrapper[4876]: I1215 08:45:25.588974 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Dec 15 08:45:26 crc kubenswrapper[4876]: I1215 08:45:26.050851 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"83a33547-3699-45b4-bf4f-989b401d6b95","Type":"ContainerStarted","Data":"e90703f1a548ac1616dede69348295defe6b7ee2d42beeee81ef15369de3e9fe"} Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.060953 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"83a33547-3699-45b4-bf4f-989b401d6b95","Type":"ContainerStarted","Data":"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1"} Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.087912 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.075529787 podStartE2EDuration="4.087884323s" podCreationTimestamp="2025-12-15 08:45:23 +0000 UTC" firstStartedPulling="2025-12-15 08:45:25.593819932 +0000 UTC m=+6851.164962843" lastFinishedPulling="2025-12-15 08:45:26.606174468 +0000 UTC m=+6852.177317379" observedRunningTime="2025-12-15 08:45:27.082541709 +0000 UTC m=+6852.653684620" watchObservedRunningTime="2025-12-15 08:45:27.087884323 +0000 UTC m=+6852.659027234" Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.322866 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.322978 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.323038 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.324012 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:45:27 crc kubenswrapper[4876]: I1215 08:45:27.324089 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd" gracePeriod=600 Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.071292 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd" exitCode=0 Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.071363 4876 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd"} Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.071866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0"} Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.071890 4876 scope.go:117] "RemoveContainer" containerID="6cccc2b2d1e442ede616a864e652cdd09402fd4204759a29bd49f2708208464a" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.197303 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.249531 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.249776 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="dnsmasq-dns" containerID="cri-o://c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a" gracePeriod=10 Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.665531 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.813596 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config\") pod \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.813797 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc\") pod \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.813877 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9m8v\" (UniqueName: \"kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v\") pod \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\" (UID: \"42352742-25a2-42ee-b8b0-7e2e8b074ec2\") " Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.818818 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v" (OuterVolumeSpecName: "kube-api-access-p9m8v") pod "42352742-25a2-42ee-b8b0-7e2e8b074ec2" (UID: "42352742-25a2-42ee-b8b0-7e2e8b074ec2"). InnerVolumeSpecName "kube-api-access-p9m8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.851773 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42352742-25a2-42ee-b8b0-7e2e8b074ec2" (UID: "42352742-25a2-42ee-b8b0-7e2e8b074ec2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.859893 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config" (OuterVolumeSpecName: "config") pod "42352742-25a2-42ee-b8b0-7e2e8b074ec2" (UID: "42352742-25a2-42ee-b8b0-7e2e8b074ec2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.916783 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.916833 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9m8v\" (UniqueName: \"kubernetes.io/projected/42352742-25a2-42ee-b8b0-7e2e8b074ec2-kube-api-access-p9m8v\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:28 crc kubenswrapper[4876]: I1215 08:45:28.916847 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42352742-25a2-42ee-b8b0-7e2e8b074ec2-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.082999 4876 generic.go:334] "Generic (PLEG): container finished" podID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerID="c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a" exitCode=0 Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.083188 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.083199 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" event={"ID":"42352742-25a2-42ee-b8b0-7e2e8b074ec2","Type":"ContainerDied","Data":"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a"} Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.083242 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55db7cd99c-lvnxm" event={"ID":"42352742-25a2-42ee-b8b0-7e2e8b074ec2","Type":"ContainerDied","Data":"50974e266a4d87c8cb2be3bd5c6c78b42aea8a1726081b4b4c880eaab9dc467c"} Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.083275 4876 scope.go:117] "RemoveContainer" containerID="c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.115463 4876 scope.go:117] "RemoveContainer" containerID="09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.116918 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.124426 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55db7cd99c-lvnxm"] Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.139521 4876 scope.go:117] "RemoveContainer" containerID="c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a" Dec 15 08:45:29 crc kubenswrapper[4876]: E1215 08:45:29.140204 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a\": container with ID starting with c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a 
not found: ID does not exist" containerID="c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.140257 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a"} err="failed to get container status \"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a\": rpc error: code = NotFound desc = could not find container \"c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a\": container with ID starting with c65058cba60f63c0edb3c9f950eb564f9489c5941accd1a5eb94e1392d6dbd0a not found: ID does not exist" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.140291 4876 scope.go:117] "RemoveContainer" containerID="09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8" Dec 15 08:45:29 crc kubenswrapper[4876]: E1215 08:45:29.140641 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8\": container with ID starting with 09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8 not found: ID does not exist" containerID="09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8" Dec 15 08:45:29 crc kubenswrapper[4876]: I1215 08:45:29.140670 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8"} err="failed to get container status \"09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8\": rpc error: code = NotFound desc = could not find container \"09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8\": container with ID starting with 09ffb64af92202352d8395c8e6c2eaf0a3cfa8e5ac9a8e40ad3b6d6c0be757b8 not found: ID does not exist" Dec 15 08:45:30 crc kubenswrapper[4876]: I1215 08:45:30.716641 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" path="/var/lib/kubelet/pods/42352742-25a2-42ee-b8b0-7e2e8b074ec2/volumes" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.358386 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 15 08:45:34 crc kubenswrapper[4876]: E1215 08:45:34.359275 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="init" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.359291 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="init" Dec 15 08:45:34 crc kubenswrapper[4876]: E1215 08:45:34.359329 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="dnsmasq-dns" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.359336 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="dnsmasq-dns" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.359500 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="42352742-25a2-42ee-b8b0-7e2e8b074ec2" containerName="dnsmasq-dns" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.360564 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.362775 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.363184 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.363196 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-n9fc9" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.375320 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.517614 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpzw7\" (UniqueName: \"kubernetes.io/projected/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-kube-api-access-wpzw7\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.517667 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-scripts\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.517701 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.517742 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-config\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.517822 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619170 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpzw7\" (UniqueName: \"kubernetes.io/projected/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-kube-api-access-wpzw7\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619267 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-scripts\") pod 
\"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619292 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619553 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-config\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.619696 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.620296 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-scripts\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.620348 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-config\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.629269 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.640279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpzw7\" (UniqueName: \"kubernetes.io/projected/48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8-kube-api-access-wpzw7\") pod \"ovn-northd-0\" (UID: \"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8\") " pod="openstack/ovn-northd-0" Dec 15 08:45:34 crc kubenswrapper[4876]: I1215 08:45:34.686896 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 15 08:45:35 crc kubenswrapper[4876]: I1215 08:45:35.176619 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 15 08:45:36 crc kubenswrapper[4876]: I1215 08:45:36.158463 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8","Type":"ContainerStarted","Data":"e704892acdf5980fa11aab19a291ba6a9d5146f468ebc9e257ce219ac96c5dbf"} Dec 15 08:45:37 crc kubenswrapper[4876]: I1215 08:45:37.170327 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8","Type":"ContainerStarted","Data":"8a195ec8a0f3227366e5515b561bcc809c86d82274d996f5f523c5d55b525edb"} Dec 15 08:45:37 crc kubenswrapper[4876]: I1215 08:45:37.171338 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8","Type":"ContainerStarted","Data":"5d166a12e4a5a0ffd169e1778b0d4803766d0fa4c44a9c187c1f84901816bcb3"} Dec 15 08:45:37 crc kubenswrapper[4876]: I1215 08:45:37.171363 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 15 08:45:37 crc kubenswrapper[4876]: I1215 08:45:37.190499 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.968501877 podStartE2EDuration="3.190477245s" podCreationTimestamp="2025-12-15 08:45:34 +0000 UTC" firstStartedPulling="2025-12-15 08:45:35.184439016 +0000 UTC m=+6860.755581927" lastFinishedPulling="2025-12-15 08:45:36.406414384 +0000 UTC m=+6861.977557295" observedRunningTime="2025-12-15 08:45:37.18878016 +0000 UTC m=+6862.759923081" watchObservedRunningTime="2025-12-15 08:45:37.190477245 +0000 UTC m=+6862.761620156" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.084319 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-br4mg"] Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.092997 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.169310 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-br4mg"] Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.177229 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.177308 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqpqb\" (UniqueName: \"kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.211186 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-2f87-account-create-update-hlzht"] Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.212510 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.226680 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.263229 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2f87-account-create-update-hlzht"] Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.278896 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.280168 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts\") pod \"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.280359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqpqb\" (UniqueName: \"kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.280492 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpvx\" (UniqueName: \"kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx\") pod \"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.281118 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.306209 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqpqb\" (UniqueName: \"kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb\") pod \"keystone-db-create-br4mg\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.382231 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts\") pod \"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.382283 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpvx\" (UniqueName: \"kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx\") pod 
\"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.383560 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts\") pod \"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.399758 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpvx\" (UniqueName: \"kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx\") pod \"keystone-2f87-account-create-update-hlzht\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.485455 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.542500 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:42 crc kubenswrapper[4876]: W1215 08:45:42.930498 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda98bf7d3_593a_47fd_b8ba_70207cc611d4.slice/crio-1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f WatchSource:0}: Error finding container 1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f: Status 404 returned error can't find the container with id 1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f Dec 15 08:45:42 crc kubenswrapper[4876]: I1215 08:45:42.931456 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-br4mg"] Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.034865 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2f87-account-create-update-hlzht"] Dec 15 08:45:43 crc kubenswrapper[4876]: W1215 08:45:43.043054 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0c96afc_ef13_43e7_a2c3_93fd949bdfdd.slice/crio-6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc WatchSource:0}: Error finding container 6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc: Status 404 returned error can't find the container with id 6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.260075 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-br4mg" event={"ID":"a98bf7d3-593a-47fd-b8ba-70207cc611d4","Type":"ContainerStarted","Data":"2bcb602dd33c4efcc5b077ed2d55caec5e0dcea3bbafc32b71404c7cf511ab00"} Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.260142 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-br4mg" event={"ID":"a98bf7d3-593a-47fd-b8ba-70207cc611d4","Type":"ContainerStarted","Data":"1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f"} Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.261585 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-2f87-account-create-update-hlzht" event={"ID":"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd","Type":"ContainerStarted","Data":"0d28036ed6036e6481aee8b4bcf6495b6a315357cbfe112ee960f58a8d3f9c44"} Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.261648 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2f87-account-create-update-hlzht" event={"ID":"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd","Type":"ContainerStarted","Data":"6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc"} Dec 15 08:45:43 crc kubenswrapper[4876]: I1215 08:45:43.294681 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-2f87-account-create-update-hlzht" podStartSLOduration=1.2946606059999999 podStartE2EDuration="1.294660606s" podCreationTimestamp="2025-12-15 08:45:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:45:43.288622114 +0000 UTC m=+6868.859765035" watchObservedRunningTime="2025-12-15 08:45:43.294660606 +0000 UTC m=+6868.865803517" Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.270773 4876 generic.go:334] "Generic (PLEG): container finished" podID="a98bf7d3-593a-47fd-b8ba-70207cc611d4" containerID="2bcb602dd33c4efcc5b077ed2d55caec5e0dcea3bbafc32b71404c7cf511ab00" exitCode=0 Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.270914 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-br4mg" event={"ID":"a98bf7d3-593a-47fd-b8ba-70207cc611d4","Type":"ContainerDied","Data":"2bcb602dd33c4efcc5b077ed2d55caec5e0dcea3bbafc32b71404c7cf511ab00"} Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.274192 4876 generic.go:334] "Generic (PLEG): container finished" podID="c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" containerID="0d28036ed6036e6481aee8b4bcf6495b6a315357cbfe112ee960f58a8d3f9c44" exitCode=0 Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.274219 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2f87-account-create-update-hlzht" event={"ID":"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd","Type":"ContainerDied","Data":"0d28036ed6036e6481aee8b4bcf6495b6a315357cbfe112ee960f58a8d3f9c44"} Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.567995 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.620580 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqpqb\" (UniqueName: \"kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb\") pod \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.620648 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts\") pod \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\" (UID: \"a98bf7d3-593a-47fd-b8ba-70207cc611d4\") " Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.621483 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a98bf7d3-593a-47fd-b8ba-70207cc611d4" (UID: "a98bf7d3-593a-47fd-b8ba-70207cc611d4"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.628968 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb" (OuterVolumeSpecName: "kube-api-access-kqpqb") pod "a98bf7d3-593a-47fd-b8ba-70207cc611d4" (UID: "a98bf7d3-593a-47fd-b8ba-70207cc611d4"). InnerVolumeSpecName "kube-api-access-kqpqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.721980 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqpqb\" (UniqueName: \"kubernetes.io/projected/a98bf7d3-593a-47fd-b8ba-70207cc611d4-kube-api-access-kqpqb\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:44 crc kubenswrapper[4876]: I1215 08:45:44.722017 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a98bf7d3-593a-47fd-b8ba-70207cc611d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.281839 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-br4mg" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.281976 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-br4mg" event={"ID":"a98bf7d3-593a-47fd-b8ba-70207cc611d4","Type":"ContainerDied","Data":"1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f"} Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.282334 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f3539b3134c2be51146f769f3240b6d1935643d08bf2a820e6800d42132308f" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.638536 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.738556 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts\") pod \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.742286 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnpvx\" (UniqueName: \"kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx\") pod \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\" (UID: \"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd\") " Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.743585 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" (UID: "c0c96afc-ef13-43e7-a2c3-93fd949bdfdd"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.745446 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.748717 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx" (OuterVolumeSpecName: "kube-api-access-dnpvx") pod "c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" (UID: "c0c96afc-ef13-43e7-a2c3-93fd949bdfdd"). InnerVolumeSpecName "kube-api-access-dnpvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:45 crc kubenswrapper[4876]: I1215 08:45:45.846803 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnpvx\" (UniqueName: \"kubernetes.io/projected/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd-kube-api-access-dnpvx\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:46 crc kubenswrapper[4876]: I1215 08:45:46.320843 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2f87-account-create-update-hlzht" event={"ID":"c0c96afc-ef13-43e7-a2c3-93fd949bdfdd","Type":"ContainerDied","Data":"6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc"} Dec 15 08:45:46 crc kubenswrapper[4876]: I1215 08:45:46.320917 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c39c2a62f9feb45357b6547bc2ccf5f85de0b415ca6292ea90c1f339d6e4bfc" Dec 15 08:45:46 crc kubenswrapper[4876]: I1215 08:45:46.321005 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2f87-account-create-update-hlzht" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.480658 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-bhn67"] Dec 15 08:45:47 crc kubenswrapper[4876]: E1215 08:45:47.481364 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98bf7d3-593a-47fd-b8ba-70207cc611d4" containerName="mariadb-database-create" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.481382 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98bf7d3-593a-47fd-b8ba-70207cc611d4" containerName="mariadb-database-create" Dec 15 08:45:47 crc kubenswrapper[4876]: E1215 08:45:47.481411 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" containerName="mariadb-account-create-update" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.481419 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" containerName="mariadb-account-create-update" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.481603 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" containerName="mariadb-account-create-update" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.481626 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98bf7d3-593a-47fd-b8ba-70207cc611d4" containerName="mariadb-database-create" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.482298 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.485604 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.486354 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.486824 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.487030 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zvrc9" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.488578 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-bhn67"] Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.573468 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.573639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.573695 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdxwf\" (UniqueName: \"kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.676054 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdxwf\" (UniqueName: \"kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.676162 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.676321 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.681226 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " 
pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.682040 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.693956 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdxwf\" (UniqueName: \"kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf\") pod \"keystone-db-sync-bhn67\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:47 crc kubenswrapper[4876]: I1215 08:45:47.840902 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:48 crc kubenswrapper[4876]: I1215 08:45:48.246804 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-bhn67"] Dec 15 08:45:48 crc kubenswrapper[4876]: I1215 08:45:48.334043 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bhn67" event={"ID":"d8bccbd9-5502-4352-87e6-dc0b1f35b070","Type":"ContainerStarted","Data":"a6b0cf3c45942fc87eec6d02e8ff07b0fe6ee79489cf1f95794aaa93fd92bc56"} Dec 15 08:45:49 crc kubenswrapper[4876]: I1215 08:45:49.745096 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 15 08:45:54 crc kubenswrapper[4876]: I1215 08:45:54.386837 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bhn67" event={"ID":"d8bccbd9-5502-4352-87e6-dc0b1f35b070","Type":"ContainerStarted","Data":"fdbece3d9f8a1801e37653081659817f45f88abae24171f33b80152de8678537"} Dec 15 08:45:54 crc kubenswrapper[4876]: I1215 08:45:54.409471 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-bhn67" podStartSLOduration=2.129894728 podStartE2EDuration="7.409445478s" podCreationTimestamp="2025-12-15 08:45:47 +0000 UTC" firstStartedPulling="2025-12-15 08:45:48.262335335 +0000 UTC m=+6873.833478246" lastFinishedPulling="2025-12-15 08:45:53.541886085 +0000 UTC m=+6879.113028996" observedRunningTime="2025-12-15 08:45:54.406800427 +0000 UTC m=+6879.977943378" watchObservedRunningTime="2025-12-15 08:45:54.409445478 +0000 UTC m=+6879.980588419" Dec 15 08:45:55 crc kubenswrapper[4876]: I1215 08:45:55.394775 4876 generic.go:334] "Generic (PLEG): container finished" podID="d8bccbd9-5502-4352-87e6-dc0b1f35b070" containerID="fdbece3d9f8a1801e37653081659817f45f88abae24171f33b80152de8678537" exitCode=0 Dec 15 08:45:55 crc kubenswrapper[4876]: I1215 08:45:55.394814 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bhn67" event={"ID":"d8bccbd9-5502-4352-87e6-dc0b1f35b070","Type":"ContainerDied","Data":"fdbece3d9f8a1801e37653081659817f45f88abae24171f33b80152de8678537"} Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.823288 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.861818 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle\") pod \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.861898 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data\") pod \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.862024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdxwf\" (UniqueName: \"kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf\") pod \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\" (UID: \"d8bccbd9-5502-4352-87e6-dc0b1f35b070\") " Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.868592 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf" (OuterVolumeSpecName: "kube-api-access-xdxwf") pod "d8bccbd9-5502-4352-87e6-dc0b1f35b070" (UID: "d8bccbd9-5502-4352-87e6-dc0b1f35b070"). InnerVolumeSpecName "kube-api-access-xdxwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.887152 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8bccbd9-5502-4352-87e6-dc0b1f35b070" (UID: "d8bccbd9-5502-4352-87e6-dc0b1f35b070"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.906525 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data" (OuterVolumeSpecName: "config-data") pod "d8bccbd9-5502-4352-87e6-dc0b1f35b070" (UID: "d8bccbd9-5502-4352-87e6-dc0b1f35b070"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.963781 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdxwf\" (UniqueName: \"kubernetes.io/projected/d8bccbd9-5502-4352-87e6-dc0b1f35b070-kube-api-access-xdxwf\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.963826 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:56 crc kubenswrapper[4876]: I1215 08:45:56.963841 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8bccbd9-5502-4352-87e6-dc0b1f35b070-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:45:57 crc kubenswrapper[4876]: I1215 08:45:57.416910 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bhn67" event={"ID":"d8bccbd9-5502-4352-87e6-dc0b1f35b070","Type":"ContainerDied","Data":"a6b0cf3c45942fc87eec6d02e8ff07b0fe6ee79489cf1f95794aaa93fd92bc56"} Dec 15 08:45:57 crc kubenswrapper[4876]: I1215 08:45:57.416948 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-bhn67" Dec 15 08:45:57 crc kubenswrapper[4876]: I1215 08:45:57.416958 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6b0cf3c45942fc87eec6d02e8ff07b0fe6ee79489cf1f95794aaa93fd92bc56" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.092604 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:45:58 crc kubenswrapper[4876]: E1215 08:45:58.097149 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8bccbd9-5502-4352-87e6-dc0b1f35b070" containerName="keystone-db-sync" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.097182 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8bccbd9-5502-4352-87e6-dc0b1f35b070" containerName="keystone-db-sync" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.097406 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8bccbd9-5502-4352-87e6-dc0b1f35b070" containerName="keystone-db-sync" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.098459 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.101779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.164158 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-srjtw"] Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.165587 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.170953 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.171219 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zvrc9" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.171356 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.171579 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.171593 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.175026 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-srjtw"] Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.185835 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.187479 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.187679 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.187700 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.187896 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.187937 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bln6\" (UniqueName: \"kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.188017 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.188125 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.188174 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.188220 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.188353 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kzrm\" (UniqueName: \"kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.289510 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.289831 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.289952 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290041 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bln6\" (UniqueName: \"kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290136 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290226 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290297 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290371 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290452 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kzrm\" (UniqueName: \"kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290527 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290621 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290831 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.290890 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.291739 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.291769 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.296116 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.297391 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.305281 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.305959 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.313019 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.313076 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bln6\" (UniqueName: \"kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6\") pod \"dnsmasq-dns-5b7658df59-7fg99\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.317082 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kzrm\" (UniqueName: \"kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm\") pod \"keystone-bootstrap-srjtw\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.425016 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.494465 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:45:58 crc kubenswrapper[4876]: I1215 08:45:58.976034 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:45:58 crc kubenswrapper[4876]: W1215 08:45:58.981903 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11505a18_69b0_4a6d_bfd6_51d2018d22ef.slice/crio-54b677e96edf41f4f9d52f61bce04729698023df2146c25f1791b696cfd43bcb WatchSource:0}: Error finding container 54b677e96edf41f4f9d52f61bce04729698023df2146c25f1791b696cfd43bcb: Status 404 returned error can't find the container with id 54b677e96edf41f4f9d52f61bce04729698023df2146c25f1791b696cfd43bcb Dec 15 08:45:59 crc kubenswrapper[4876]: I1215 08:45:59.047255 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-srjtw"] Dec 15 08:45:59 crc kubenswrapper[4876]: W1215 08:45:59.050044 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96eb9b6f_cb2a_4d55_8706_2189e804f559.slice/crio-b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb WatchSource:0}: Error finding container b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb: Status 404 returned error can't find the container with id b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb Dec 15 08:45:59 crc kubenswrapper[4876]: I1215 08:45:59.433843 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-srjtw" event={"ID":"96eb9b6f-cb2a-4d55-8706-2189e804f559","Type":"ContainerStarted","Data":"b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb"} Dec 15 08:45:59 crc kubenswrapper[4876]: I1215 08:45:59.434971 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" event={"ID":"11505a18-69b0-4a6d-bfd6-51d2018d22ef","Type":"ContainerStarted","Data":"54b677e96edf41f4f9d52f61bce04729698023df2146c25f1791b696cfd43bcb"} Dec 15 08:46:00 crc kubenswrapper[4876]: I1215 08:46:00.452840 4876 generic.go:334] "Generic (PLEG): container finished" podID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerID="aaae199571f0f1eb69fa8c3d0282094c45213d0010c2fb88d82a32677136402e" exitCode=0 Dec 15 08:46:00 crc kubenswrapper[4876]: I1215 08:46:00.453157 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" event={"ID":"11505a18-69b0-4a6d-bfd6-51d2018d22ef","Type":"ContainerDied","Data":"aaae199571f0f1eb69fa8c3d0282094c45213d0010c2fb88d82a32677136402e"} Dec 15 08:46:00 crc kubenswrapper[4876]: I1215 08:46:00.456965 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-srjtw" event={"ID":"96eb9b6f-cb2a-4d55-8706-2189e804f559","Type":"ContainerStarted","Data":"64c497f594d2a2da96bb3548a931ad72fbe29917889e0da7ef54103d18d34e3a"} Dec 15 08:46:01 crc kubenswrapper[4876]: I1215 08:46:01.467521 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" event={"ID":"11505a18-69b0-4a6d-bfd6-51d2018d22ef","Type":"ContainerStarted","Data":"4d108dd06c477c765ed01422582078be839784d5aa3821ace0e1c548d3daa0a1"} Dec 15 08:46:01 crc kubenswrapper[4876]: I1215 08:46:01.467922 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:46:01 crc kubenswrapper[4876]: I1215 08:46:01.484792 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-srjtw" podStartSLOduration=3.484772437 podStartE2EDuration="3.484772437s" podCreationTimestamp="2025-12-15 08:45:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:46:00.497358822 +0000 UTC m=+6886.068501743" watchObservedRunningTime="2025-12-15 08:46:01.484772437 +0000 UTC m=+6887.055915348" Dec 15 08:46:01 crc kubenswrapper[4876]: I1215 08:46:01.486850 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" podStartSLOduration=3.486841323 podStartE2EDuration="3.486841323s" podCreationTimestamp="2025-12-15 08:45:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:46:01.482821625 +0000 UTC m=+6887.053964556" watchObservedRunningTime="2025-12-15 08:46:01.486841323 +0000 UTC m=+6887.057984264" Dec 15 08:46:03 crc kubenswrapper[4876]: I1215 08:46:03.481348 4876 generic.go:334] "Generic (PLEG): container finished" podID="96eb9b6f-cb2a-4d55-8706-2189e804f559" containerID="64c497f594d2a2da96bb3548a931ad72fbe29917889e0da7ef54103d18d34e3a" exitCode=0 Dec 15 08:46:03 crc kubenswrapper[4876]: I1215 08:46:03.481455 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-srjtw" event={"ID":"96eb9b6f-cb2a-4d55-8706-2189e804f559","Type":"ContainerDied","Data":"64c497f594d2a2da96bb3548a931ad72fbe29917889e0da7ef54103d18d34e3a"} Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.806298 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820260 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820407 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820508 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820627 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kzrm\" (UniqueName: \"kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820663 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: 
\"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.820695 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts\") pod \"96eb9b6f-cb2a-4d55-8706-2189e804f559\" (UID: \"96eb9b6f-cb2a-4d55-8706-2189e804f559\") " Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.826163 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.828870 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts" (OuterVolumeSpecName: "scripts") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.832272 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.832334 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm" (OuterVolumeSpecName: "kube-api-access-5kzrm") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "kube-api-access-5kzrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.849385 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.854947 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data" (OuterVolumeSpecName: "config-data") pod "96eb9b6f-cb2a-4d55-8706-2189e804f559" (UID: "96eb9b6f-cb2a-4d55-8706-2189e804f559"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922376 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922408 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922418 4876 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922427 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kzrm\" (UniqueName: \"kubernetes.io/projected/96eb9b6f-cb2a-4d55-8706-2189e804f559-kube-api-access-5kzrm\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922435 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:04 crc kubenswrapper[4876]: I1215 08:46:04.922443 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96eb9b6f-cb2a-4d55-8706-2189e804f559-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.497844 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-srjtw" event={"ID":"96eb9b6f-cb2a-4d55-8706-2189e804f559","Type":"ContainerDied","Data":"b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb"} Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.497908 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-srjtw" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.497890 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b690c237412e8239eb9eaff73cf8d70ab6a4820113c43463ab8fc012fd9e34fb" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.577783 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-srjtw"] Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.583913 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-srjtw"] Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.678967 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-msbfs"] Dec 15 08:46:05 crc kubenswrapper[4876]: E1215 08:46:05.679377 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96eb9b6f-cb2a-4d55-8706-2189e804f559" containerName="keystone-bootstrap" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.679405 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="96eb9b6f-cb2a-4d55-8706-2189e804f559" containerName="keystone-bootstrap" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.679645 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="96eb9b6f-cb2a-4d55-8706-2189e804f559" containerName="keystone-bootstrap" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.680174 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.682268 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.682468 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-zvrc9" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.682615 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.682726 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.682842 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.691585 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-msbfs"] Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.737467 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.737518 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.737706 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq7z4\" (UniqueName: 
\"kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.737790 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.737817 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.738074 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839480 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq7z4\" (UniqueName: \"kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839597 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839631 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839753 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.839774 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys\") pod 
\"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.844699 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.845058 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.845195 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.845684 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.846410 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.859418 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq7z4\" (UniqueName: \"kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4\") pod \"keystone-bootstrap-msbfs\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:05 crc kubenswrapper[4876]: I1215 08:46:05.998180 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:06 crc kubenswrapper[4876]: I1215 08:46:06.447829 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-msbfs"] Dec 15 08:46:06 crc kubenswrapper[4876]: I1215 08:46:06.505932 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-msbfs" event={"ID":"e02a5401-9efc-48a2-b7fa-a3750f1186fe","Type":"ContainerStarted","Data":"e6aa1e5a4b473aebe24121145b32e0aa1950238e922b074b060c03f085a09d94"} Dec 15 08:46:06 crc kubenswrapper[4876]: I1215 08:46:06.717067 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96eb9b6f-cb2a-4d55-8706-2189e804f559" path="/var/lib/kubelet/pods/96eb9b6f-cb2a-4d55-8706-2189e804f559/volumes" Dec 15 08:46:07 crc kubenswrapper[4876]: I1215 08:46:07.513945 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-msbfs" event={"ID":"e02a5401-9efc-48a2-b7fa-a3750f1186fe","Type":"ContainerStarted","Data":"b7f3873b611dd902ada85c97f4b85da8179426f4731d48bfea152ec897008786"} Dec 15 08:46:07 crc kubenswrapper[4876]: I1215 08:46:07.533208 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-msbfs" podStartSLOduration=2.533185489 podStartE2EDuration="2.533185489s" podCreationTimestamp="2025-12-15 08:46:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:46:07.528300078 +0000 UTC m=+6893.099442999" watchObservedRunningTime="2025-12-15 08:46:07.533185489 +0000 UTC m=+6893.104328400" Dec 15 08:46:08 crc kubenswrapper[4876]: I1215 08:46:08.427289 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:46:08 crc kubenswrapper[4876]: I1215 08:46:08.490959 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:46:08 crc kubenswrapper[4876]: I1215 08:46:08.491225 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="dnsmasq-dns" containerID="cri-o://ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e" gracePeriod=10 Dec 15 08:46:08 crc kubenswrapper[4876]: I1215 08:46:08.953252 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.114434 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddzrb\" (UniqueName: \"kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb\") pod \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.114527 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc\") pod \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.114606 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb\") pod \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.114716 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config\") pod \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.114772 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb\") pod \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\" (UID: \"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd\") " Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.122697 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb" (OuterVolumeSpecName: "kube-api-access-ddzrb") pod "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" (UID: "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd"). InnerVolumeSpecName "kube-api-access-ddzrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.154573 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" (UID: "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.157652 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" (UID: "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.159673 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" (UID: "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.160838 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config" (OuterVolumeSpecName: "config") pod "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" (UID: "46ef7a70-e1b5-4434-8eb5-7883f0ff93cd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.216571 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.216603 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.216614 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddzrb\" (UniqueName: \"kubernetes.io/projected/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-kube-api-access-ddzrb\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.216622 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.216629 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.527788 4876 generic.go:334] "Generic (PLEG): container finished" podID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerID="ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e" exitCode=0 Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.527853 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.527876 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" event={"ID":"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd","Type":"ContainerDied","Data":"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e"} Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.527905 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65575b46ff-vz4zr" event={"ID":"46ef7a70-e1b5-4434-8eb5-7883f0ff93cd","Type":"ContainerDied","Data":"ebfe68d42d018076ca0efa9511654ad0efce2eb1198dc830b718bd31ba8e0e7a"} Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.527924 4876 scope.go:117] "RemoveContainer" containerID="ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.530015 4876 generic.go:334] "Generic (PLEG): container finished" podID="e02a5401-9efc-48a2-b7fa-a3750f1186fe" containerID="b7f3873b611dd902ada85c97f4b85da8179426f4731d48bfea152ec897008786" exitCode=0 Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.530068 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-msbfs" event={"ID":"e02a5401-9efc-48a2-b7fa-a3750f1186fe","Type":"ContainerDied","Data":"b7f3873b611dd902ada85c97f4b85da8179426f4731d48bfea152ec897008786"} Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.556267 4876 scope.go:117] "RemoveContainer" containerID="d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.576217 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.584623 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65575b46ff-vz4zr"] Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.590349 4876 scope.go:117] "RemoveContainer" containerID="ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e" Dec 15 08:46:09 crc kubenswrapper[4876]: E1215 08:46:09.591170 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e\": container with ID starting with ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e not found: ID does not exist" containerID="ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.591204 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e"} err="failed to get container status \"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e\": rpc error: code = NotFound desc = could not find container \"ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e\": container with ID starting with ccaa02b525893982b14becdec0f93696e85128837a146569ffc6a073f079d99e not found: ID does not exist" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.591224 4876 scope.go:117] "RemoveContainer" containerID="d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52" Dec 15 08:46:09 crc kubenswrapper[4876]: E1215 08:46:09.592338 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52\": container with ID starting with d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52 not found: ID does not exist" containerID="d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52" Dec 15 08:46:09 crc kubenswrapper[4876]: I1215 08:46:09.592360 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52"} err="failed to get container status \"d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52\": rpc error: code = NotFound desc = could not find container \"d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52\": container with ID starting with d9bbcc8246b4120239d42bc6f197538fbb3b1c1c882d27b8bacc5c25df8c1d52 not found: ID does not exist" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.721924 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" path="/var/lib/kubelet/pods/46ef7a70-e1b5-4434-8eb5-7883f0ff93cd/volumes" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.838085 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940381 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940454 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940494 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940615 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq7z4\" (UniqueName: \"kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940647 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.940761 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.945665 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.945748 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4" (OuterVolumeSpecName: "kube-api-access-kq7z4") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "kube-api-access-kq7z4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.946079 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.955319 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts" (OuterVolumeSpecName: "scripts") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:10 crc kubenswrapper[4876]: E1215 08:46:10.959271 4876 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data podName:e02a5401-9efc-48a2-b7fa-a3750f1186fe nodeName:}" failed. No retries permitted until 2025-12-15 08:46:11.459235619 +0000 UTC m=+6897.030378540 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe") : error deleting /var/lib/kubelet/pods/e02a5401-9efc-48a2-b7fa-a3750f1186fe/volume-subpaths: remove /var/lib/kubelet/pods/e02a5401-9efc-48a2-b7fa-a3750f1186fe/volume-subpaths: no such file or directory Dec 15 08:46:10 crc kubenswrapper[4876]: I1215 08:46:10.962231 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.042169 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.042206 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.042218 4876 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.042234 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.042249 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq7z4\" (UniqueName: \"kubernetes.io/projected/e02a5401-9efc-48a2-b7fa-a3750f1186fe-kube-api-access-kq7z4\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.548642 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") pod \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\" (UID: \"e02a5401-9efc-48a2-b7fa-a3750f1186fe\") " Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.550535 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-msbfs" event={"ID":"e02a5401-9efc-48a2-b7fa-a3750f1186fe","Type":"ContainerDied","Data":"e6aa1e5a4b473aebe24121145b32e0aa1950238e922b074b060c03f085a09d94"} Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.550576 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6aa1e5a4b473aebe24121145b32e0aa1950238e922b074b060c03f085a09d94" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.550882 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-msbfs" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.552721 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data" (OuterVolumeSpecName: "config-data") pod "e02a5401-9efc-48a2-b7fa-a3750f1186fe" (UID: "e02a5401-9efc-48a2-b7fa-a3750f1186fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.634978 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9776897b9-wfbwj"] Dec 15 08:46:11 crc kubenswrapper[4876]: E1215 08:46:11.635307 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="init" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.635323 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="init" Dec 15 08:46:11 crc kubenswrapper[4876]: E1215 08:46:11.635339 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="dnsmasq-dns" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.635345 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="dnsmasq-dns" Dec 15 08:46:11 crc kubenswrapper[4876]: E1215 08:46:11.635368 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e02a5401-9efc-48a2-b7fa-a3750f1186fe" containerName="keystone-bootstrap" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.635374 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e02a5401-9efc-48a2-b7fa-a3750f1186fe" containerName="keystone-bootstrap" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.635537 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e02a5401-9efc-48a2-b7fa-a3750f1186fe" containerName="keystone-bootstrap" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.635548 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="46ef7a70-e1b5-4434-8eb5-7883f0ff93cd" containerName="dnsmasq-dns" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.636089 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650462 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-fernet-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650623 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-credential-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650664 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-config-data\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-scripts\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650780 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-combined-ca-bundle\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650808 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wh9w\" (UniqueName: \"kubernetes.io/projected/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-kube-api-access-4wh9w\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.650879 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02a5401-9efc-48a2-b7fa-a3750f1186fe-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.654194 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9776897b9-wfbwj"] Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752189 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-combined-ca-bundle\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752255 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wh9w\" (UniqueName: \"kubernetes.io/projected/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-kube-api-access-4wh9w\") pod 
\"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752335 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-fernet-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752386 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-credential-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752414 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-config-data\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.752440 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-scripts\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.755948 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-scripts\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.756459 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-fernet-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.756505 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-config-data\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.756890 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-combined-ca-bundle\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.758086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-credential-keys\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.769971 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4wh9w\" (UniqueName: \"kubernetes.io/projected/d10b248e-69de-4f99-9e4b-afb3c7c1e53e-kube-api-access-4wh9w\") pod \"keystone-9776897b9-wfbwj\" (UID: \"d10b248e-69de-4f99-9e4b-afb3c7c1e53e\") " pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:11 crc kubenswrapper[4876]: I1215 08:46:11.961514 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:12 crc kubenswrapper[4876]: I1215 08:46:12.427180 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9776897b9-wfbwj"] Dec 15 08:46:12 crc kubenswrapper[4876]: I1215 08:46:12.557993 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9776897b9-wfbwj" event={"ID":"d10b248e-69de-4f99-9e4b-afb3c7c1e53e","Type":"ContainerStarted","Data":"298319001f4038d489e7c722dc27045d60f506861e4a747c99cdfcc92cc9f829"} Dec 15 08:46:13 crc kubenswrapper[4876]: I1215 08:46:13.566321 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9776897b9-wfbwj" event={"ID":"d10b248e-69de-4f99-9e4b-afb3c7c1e53e","Type":"ContainerStarted","Data":"4d0480c86fc81778137e1b780f7a8ae751c156271ed983c9c939fc07730ab68d"} Dec 15 08:46:13 crc kubenswrapper[4876]: I1215 08:46:13.566620 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:13 crc kubenswrapper[4876]: I1215 08:46:13.585931 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-9776897b9-wfbwj" podStartSLOduration=2.585910837 podStartE2EDuration="2.585910837s" podCreationTimestamp="2025-12-15 08:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:46:13.582094375 +0000 UTC m=+6899.153237296" watchObservedRunningTime="2025-12-15 08:46:13.585910837 +0000 UTC m=+6899.157053748" Dec 15 08:46:43 crc kubenswrapper[4876]: I1215 08:46:43.787887 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-9776897b9-wfbwj" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.098452 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.105944 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.111317 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.111345 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.113970 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-456rt" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.128517 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.260140 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.260504 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.260556 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78x7k\" (UniqueName: \"kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.362605 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.362690 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78x7k\" (UniqueName: \"kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.362756 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.363721 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.369577 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.386315 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78x7k\" (UniqueName: \"kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k\") pod \"openstackclient\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.434523 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 08:46:47 crc kubenswrapper[4876]: I1215 08:46:47.919701 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 08:46:48 crc kubenswrapper[4876]: I1215 08:46:48.831084 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ac4bc4be-fe90-48ae-ac23-83fb5870105a","Type":"ContainerStarted","Data":"851e7424666a29fecdefe92dd5ac70e3acd05bbef4a16d6299e8e42d3913de67"} Dec 15 08:46:59 crc kubenswrapper[4876]: I1215 08:46:59.008584 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ac4bc4be-fe90-48ae-ac23-83fb5870105a","Type":"ContainerStarted","Data":"75fddfeed143e85cdbcb6fce3c96effc4face30468aa3851ae1994ec7f4f7191"} Dec 15 08:46:59 crc kubenswrapper[4876]: I1215 08:46:59.034824 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.291084688 podStartE2EDuration="12.03479974s" podCreationTimestamp="2025-12-15 08:46:47 +0000 UTC" firstStartedPulling="2025-12-15 08:46:47.921157367 +0000 UTC m=+6933.492300278" lastFinishedPulling="2025-12-15 08:46:58.664872419 +0000 UTC m=+6944.236015330" observedRunningTime="2025-12-15 08:46:59.026369013 +0000 UTC m=+6944.597511924" watchObservedRunningTime="2025-12-15 08:46:59.03479974 +0000 UTC m=+6944.605942651" Dec 15 08:47:27 crc kubenswrapper[4876]: I1215 08:47:27.323169 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:47:27 crc kubenswrapper[4876]: I1215 08:47:27.323846 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:47:43 crc kubenswrapper[4876]: E1215 08:47:43.085978 4876 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.70:33834->38.102.83.70:35145: write tcp 38.102.83.70:33834->38.102.83.70:35145: write: broken pipe Dec 15 08:47:57 crc kubenswrapper[4876]: I1215 08:47:57.323487 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:47:57 crc kubenswrapper[4876]: I1215 
08:47:57.324099 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.364853 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.367013 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.372935 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.470765 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.470839 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkpll\" (UniqueName: \"kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.470946 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.572084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.572188 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.572229 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkpll\" (UniqueName: \"kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.573382 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.573657 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.592636 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkpll\" (UniqueName: \"kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll\") pod \"community-operators-2j6qm\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:58 crc kubenswrapper[4876]: I1215 08:47:58.696570 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:47:59 crc kubenswrapper[4876]: I1215 08:47:59.232696 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:47:59 crc kubenswrapper[4876]: I1215 08:47:59.468590 4876 generic.go:334] "Generic (PLEG): container finished" podID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerID="6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d" exitCode=0 Dec 15 08:47:59 crc kubenswrapper[4876]: I1215 08:47:59.468631 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerDied","Data":"6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d"} Dec 15 08:47:59 crc kubenswrapper[4876]: I1215 08:47:59.468664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerStarted","Data":"704ea3b931f81953594ccd77efdd749e19a55fb6154e6d2ec87a0e0c6aed1c2f"} Dec 15 08:47:59 crc kubenswrapper[4876]: I1215 08:47:59.477895 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:48:01 crc kubenswrapper[4876]: I1215 08:48:01.494623 4876 generic.go:334] "Generic (PLEG): container finished" podID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerID="dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0" exitCode=0 Dec 15 08:48:01 crc kubenswrapper[4876]: I1215 08:48:01.494715 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerDied","Data":"dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0"} Dec 15 08:48:02 crc kubenswrapper[4876]: I1215 08:48:02.504216 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerStarted","Data":"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d"} Dec 15 08:48:02 crc kubenswrapper[4876]: I1215 08:48:02.536889 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2j6qm" podStartSLOduration=2.053496416 
podStartE2EDuration="4.536872444s" podCreationTimestamp="2025-12-15 08:47:58 +0000 UTC" firstStartedPulling="2025-12-15 08:47:59.470690245 +0000 UTC m=+7005.041833156" lastFinishedPulling="2025-12-15 08:48:01.954066273 +0000 UTC m=+7007.525209184" observedRunningTime="2025-12-15 08:48:02.53151587 +0000 UTC m=+7008.102658781" watchObservedRunningTime="2025-12-15 08:48:02.536872444 +0000 UTC m=+7008.108015355" Dec 15 08:48:08 crc kubenswrapper[4876]: I1215 08:48:08.697590 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:08 crc kubenswrapper[4876]: I1215 08:48:08.699307 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:08 crc kubenswrapper[4876]: I1215 08:48:08.768177 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:09 crc kubenswrapper[4876]: I1215 08:48:09.592707 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:09 crc kubenswrapper[4876]: I1215 08:48:09.635287 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:48:11 crc kubenswrapper[4876]: I1215 08:48:11.569621 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2j6qm" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="registry-server" containerID="cri-o://4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d" gracePeriod=2 Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.056489 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.205579 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkpll\" (UniqueName: \"kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll\") pod \"34f9c9cd-f59a-4d63-87ca-894e2418a113\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.205984 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities\") pod \"34f9c9cd-f59a-4d63-87ca-894e2418a113\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.206057 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content\") pod \"34f9c9cd-f59a-4d63-87ca-894e2418a113\" (UID: \"34f9c9cd-f59a-4d63-87ca-894e2418a113\") " Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.207055 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities" (OuterVolumeSpecName: "utilities") pod "34f9c9cd-f59a-4d63-87ca-894e2418a113" (UID: "34f9c9cd-f59a-4d63-87ca-894e2418a113"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.231176 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll" (OuterVolumeSpecName: "kube-api-access-nkpll") pod "34f9c9cd-f59a-4d63-87ca-894e2418a113" (UID: "34f9c9cd-f59a-4d63-87ca-894e2418a113"). InnerVolumeSpecName "kube-api-access-nkpll". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.266468 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34f9c9cd-f59a-4d63-87ca-894e2418a113" (UID: "34f9c9cd-f59a-4d63-87ca-894e2418a113"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.307463 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkpll\" (UniqueName: \"kubernetes.io/projected/34f9c9cd-f59a-4d63-87ca-894e2418a113-kube-api-access-nkpll\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.307502 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.307512 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f9c9cd-f59a-4d63-87ca-894e2418a113-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.578660 4876 generic.go:334] "Generic (PLEG): container finished" podID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerID="4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d" exitCode=0 Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.578731 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerDied","Data":"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d"} Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.578763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2j6qm" event={"ID":"34f9c9cd-f59a-4d63-87ca-894e2418a113","Type":"ContainerDied","Data":"704ea3b931f81953594ccd77efdd749e19a55fb6154e6d2ec87a0e0c6aed1c2f"} Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.578788 4876 scope.go:117] "RemoveContainer" containerID="4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.578793 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2j6qm" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.598367 4876 scope.go:117] "RemoveContainer" containerID="dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.622522 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.625493 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2j6qm"] Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.637324 4876 scope.go:117] "RemoveContainer" containerID="6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.665914 4876 scope.go:117] "RemoveContainer" containerID="4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d" Dec 15 08:48:12 crc kubenswrapper[4876]: E1215 08:48:12.666306 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d\": container with ID starting with 4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d not found: ID does not exist" containerID="4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.666342 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d"} err="failed to get container status \"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d\": rpc error: code = NotFound desc = could not find container \"4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d\": container with ID starting with 4cb2fa42315e674c950f4358d042f4edbcd464d2799e6d4568548f7474dd558d not found: ID does not exist" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.666362 4876 scope.go:117] "RemoveContainer" containerID="dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0" Dec 15 08:48:12 crc kubenswrapper[4876]: E1215 08:48:12.666625 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0\": container with ID starting with dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0 not found: ID does not exist" containerID="dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.666656 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0"} err="failed to get container status \"dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0\": rpc error: code = NotFound desc = could not find container \"dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0\": container with ID starting with dd40db4cf6ecf3ffe02ba793aea0b2454de744471ae9b2e7fb44993ad09a9ba0 not found: ID does not exist" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.666671 4876 scope.go:117] "RemoveContainer" containerID="6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d" Dec 15 08:48:12 crc kubenswrapper[4876]: E1215 08:48:12.667084 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d\": container with ID starting with 6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d not found: ID does not exist" containerID="6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.667122 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d"} err="failed to get container status \"6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d\": rpc error: code = NotFound desc = could not find container \"6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d\": container with ID starting with 6a9c93cb76dcb003a24da16f3d40ae745a3b0d60e498c6b6a0062ad5128b2d1d not found: ID does not exist" Dec 15 08:48:12 crc kubenswrapper[4876]: I1215 08:48:12.717678 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" path="/var/lib/kubelet/pods/34f9c9cd-f59a-4d63-87ca-894e2418a113/volumes" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.112693 4876 scope.go:117] "RemoveContainer" containerID="1aa78640eb59af7c2232202f3269e91f03938b2b1aa092ff1fc393733a168580" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.132413 4876 scope.go:117] "RemoveContainer" containerID="387ce04ae0b8f7261e25023a161c1086e9452dec53132192e0d05e966f57d968" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.152559 4876 scope.go:117] "RemoveContainer" containerID="72899f09769291221b8ee6964adf17f6597939d31c509a11722e7d19b797a276" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.833203 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8340-account-create-update-shntv"] Dec 15 08:48:24 crc kubenswrapper[4876]: E1215 08:48:24.833658 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="extract-content" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.833681 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="extract-content" Dec 15 08:48:24 crc kubenswrapper[4876]: E1215 08:48:24.833694 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="extract-utilities" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.833703 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="extract-utilities" Dec 15 08:48:24 crc kubenswrapper[4876]: E1215 08:48:24.833729 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="registry-server" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.833740 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="registry-server" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.833938 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f9c9cd-f59a-4d63-87ca-894e2418a113" containerName="registry-server" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.834673 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.836592 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.852413 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-vtq2q"] Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.853917 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.871881 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8340-account-create-update-shntv"] Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.920435 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vtq2q"] Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.935339 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.935410 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk8w7\" (UniqueName: \"kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.935503 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:24 crc kubenswrapper[4876]: I1215 08:48:24.935536 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gvhb\" (UniqueName: \"kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.036654 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.036708 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gvhb\" (UniqueName: \"kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.036779 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.036807 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk8w7\" (UniqueName: \"kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.037503 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.037520 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.056462 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gvhb\" (UniqueName: \"kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb\") pod \"barbican-db-create-vtq2q\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.059744 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk8w7\" (UniqueName: \"kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7\") pod \"barbican-8340-account-create-update-shntv\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.151748 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.179272 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.635437 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8340-account-create-update-shntv"] Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.685175 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-vtq2q"] Dec 15 08:48:25 crc kubenswrapper[4876]: I1215 08:48:25.690376 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8340-account-create-update-shntv" event={"ID":"3d29e395-e1dc-4668-8f5d-6e28693c1990","Type":"ContainerStarted","Data":"8d95f94c64fa1d715d89f2cb8368b7237fd6164ea1dd5f92c099373af0901250"} Dec 15 08:48:25 crc kubenswrapper[4876]: W1215 08:48:25.693948 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod197123db_9e54_4e82_be91_7c531f7be3c1.slice/crio-92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a WatchSource:0}: Error finding container 92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a: Status 404 returned error can't find the container with id 92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a Dec 15 08:48:26 crc kubenswrapper[4876]: I1215 08:48:26.699005 4876 generic.go:334] "Generic (PLEG): container finished" podID="197123db-9e54-4e82-be91-7c531f7be3c1" containerID="34121a2a97e30f7666a49f517158304048f0f49dd044c794c0664b330eb16489" exitCode=0 Dec 15 08:48:26 crc kubenswrapper[4876]: I1215 08:48:26.699062 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vtq2q" event={"ID":"197123db-9e54-4e82-be91-7c531f7be3c1","Type":"ContainerDied","Data":"34121a2a97e30f7666a49f517158304048f0f49dd044c794c0664b330eb16489"} Dec 15 08:48:26 crc kubenswrapper[4876]: I1215 08:48:26.699350 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vtq2q" event={"ID":"197123db-9e54-4e82-be91-7c531f7be3c1","Type":"ContainerStarted","Data":"92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a"} Dec 15 08:48:26 crc kubenswrapper[4876]: I1215 08:48:26.700748 4876 generic.go:334] "Generic (PLEG): container finished" podID="3d29e395-e1dc-4668-8f5d-6e28693c1990" containerID="11f5811760297b22c6d27f33c565be3bc8328c4da29920e6b3029722b866ddfe" exitCode=0 Dec 15 08:48:26 crc kubenswrapper[4876]: I1215 08:48:26.700791 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8340-account-create-update-shntv" event={"ID":"3d29e395-e1dc-4668-8f5d-6e28693c1990","Type":"ContainerDied","Data":"11f5811760297b22c6d27f33c565be3bc8328c4da29920e6b3029722b866ddfe"} Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.322991 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.323350 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.323397 4876 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.324008 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.324066 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" gracePeriod=600 Dec 15 08:48:27 crc kubenswrapper[4876]: E1215 08:48:27.463989 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.709085 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" exitCode=0 Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.709125 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0"} Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.709168 4876 scope.go:117] "RemoveContainer" containerID="aba1a98a0d3176406b1734d4bed65f4bfe3fed20e881d715632d44c039a22cbd" Dec 15 08:48:27 crc kubenswrapper[4876]: I1215 08:48:27.709779 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:48:27 crc kubenswrapper[4876]: E1215 08:48:27.710016 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.074622 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.082688 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.190932 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts\") pod \"3d29e395-e1dc-4668-8f5d-6e28693c1990\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.191062 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gvhb\" (UniqueName: \"kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb\") pod \"197123db-9e54-4e82-be91-7c531f7be3c1\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.191124 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts\") pod \"197123db-9e54-4e82-be91-7c531f7be3c1\" (UID: \"197123db-9e54-4e82-be91-7c531f7be3c1\") " Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.191176 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk8w7\" (UniqueName: \"kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7\") pod \"3d29e395-e1dc-4668-8f5d-6e28693c1990\" (UID: \"3d29e395-e1dc-4668-8f5d-6e28693c1990\") " Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.193117 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d29e395-e1dc-4668-8f5d-6e28693c1990" (UID: "3d29e395-e1dc-4668-8f5d-6e28693c1990"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.193097 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "197123db-9e54-4e82-be91-7c531f7be3c1" (UID: "197123db-9e54-4e82-be91-7c531f7be3c1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.197246 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb" (OuterVolumeSpecName: "kube-api-access-2gvhb") pod "197123db-9e54-4e82-be91-7c531f7be3c1" (UID: "197123db-9e54-4e82-be91-7c531f7be3c1"). InnerVolumeSpecName "kube-api-access-2gvhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.197674 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7" (OuterVolumeSpecName: "kube-api-access-dk8w7") pod "3d29e395-e1dc-4668-8f5d-6e28693c1990" (UID: "3d29e395-e1dc-4668-8f5d-6e28693c1990"). InnerVolumeSpecName "kube-api-access-dk8w7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.293707 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d29e395-e1dc-4668-8f5d-6e28693c1990-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.293758 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gvhb\" (UniqueName: \"kubernetes.io/projected/197123db-9e54-4e82-be91-7c531f7be3c1-kube-api-access-2gvhb\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.293771 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/197123db-9e54-4e82-be91-7c531f7be3c1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.293779 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk8w7\" (UniqueName: \"kubernetes.io/projected/3d29e395-e1dc-4668-8f5d-6e28693c1990-kube-api-access-dk8w7\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.728522 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-vtq2q" event={"ID":"197123db-9e54-4e82-be91-7c531f7be3c1","Type":"ContainerDied","Data":"92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a"} Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.728597 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92e25f5ec4d3f9b2624ad411173cf0f1bd63201890d809d2273b0bee6d7c575a" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.728564 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-vtq2q" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.732418 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8340-account-create-update-shntv" event={"ID":"3d29e395-e1dc-4668-8f5d-6e28693c1990","Type":"ContainerDied","Data":"8d95f94c64fa1d715d89f2cb8368b7237fd6164ea1dd5f92c099373af0901250"} Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.732459 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d95f94c64fa1d715d89f2cb8368b7237fd6164ea1dd5f92c099373af0901250" Dec 15 08:48:28 crc kubenswrapper[4876]: I1215 08:48:28.732523 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8340-account-create-update-shntv" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.181489 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-8r24z"] Dec 15 08:48:30 crc kubenswrapper[4876]: E1215 08:48:30.181894 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="197123db-9e54-4e82-be91-7c531f7be3c1" containerName="mariadb-database-create" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.181913 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="197123db-9e54-4e82-be91-7c531f7be3c1" containerName="mariadb-database-create" Dec 15 08:48:30 crc kubenswrapper[4876]: E1215 08:48:30.181963 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d29e395-e1dc-4668-8f5d-6e28693c1990" containerName="mariadb-account-create-update" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.181973 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d29e395-e1dc-4668-8f5d-6e28693c1990" containerName="mariadb-account-create-update" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.182205 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="197123db-9e54-4e82-be91-7c531f7be3c1" containerName="mariadb-database-create" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.182232 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d29e395-e1dc-4668-8f5d-6e28693c1990" containerName="mariadb-account-create-update" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.182755 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.185385 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mnk6s" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.185809 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.193358 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8r24z"] Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.323088 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmwzb\" (UniqueName: \"kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.323299 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.323346 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.424475 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-cmwzb\" (UniqueName: \"kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.424863 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.424880 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.431799 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.438308 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.445145 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmwzb\" (UniqueName: \"kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb\") pod \"barbican-db-sync-8r24z\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.508469 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:30 crc kubenswrapper[4876]: I1215 08:48:30.939237 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-8r24z"] Dec 15 08:48:31 crc kubenswrapper[4876]: I1215 08:48:31.754082 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8r24z" event={"ID":"83f17d8f-d2c6-4755-af05-832c7eaa4bcc","Type":"ContainerStarted","Data":"a81c8cc954df9a0b3214df33bf96e55ed52bc908051e9f02732393fe7f408223"} Dec 15 08:48:37 crc kubenswrapper[4876]: I1215 08:48:37.801454 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8r24z" event={"ID":"83f17d8f-d2c6-4755-af05-832c7eaa4bcc","Type":"ContainerStarted","Data":"dc733a890652e872bd97e21c9c6a0b252cd47fa7178409e67a97a5dee6c941bd"} Dec 15 08:48:37 crc kubenswrapper[4876]: I1215 08:48:37.820827 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-8r24z" podStartSLOduration=1.8316659880000001 podStartE2EDuration="7.820777996s" podCreationTimestamp="2025-12-15 08:48:30 +0000 UTC" firstStartedPulling="2025-12-15 08:48:30.949182583 +0000 UTC m=+7036.520325494" lastFinishedPulling="2025-12-15 08:48:36.938294601 +0000 UTC m=+7042.509437502" observedRunningTime="2025-12-15 08:48:37.815348701 +0000 UTC m=+7043.386491622" watchObservedRunningTime="2025-12-15 08:48:37.820777996 +0000 UTC m=+7043.391920907" Dec 15 08:48:39 crc kubenswrapper[4876]: I1215 08:48:39.819319 4876 generic.go:334] "Generic (PLEG): container finished" podID="83f17d8f-d2c6-4755-af05-832c7eaa4bcc" containerID="dc733a890652e872bd97e21c9c6a0b252cd47fa7178409e67a97a5dee6c941bd" exitCode=0 Dec 15 08:48:39 crc kubenswrapper[4876]: I1215 08:48:39.819374 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8r24z" event={"ID":"83f17d8f-d2c6-4755-af05-832c7eaa4bcc","Type":"ContainerDied","Data":"dc733a890652e872bd97e21c9c6a0b252cd47fa7178409e67a97a5dee6c941bd"} Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.114533 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.295931 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmwzb\" (UniqueName: \"kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb\") pod \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.296508 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data\") pod \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.296791 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle\") pod \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\" (UID: \"83f17d8f-d2c6-4755-af05-832c7eaa4bcc\") " Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.302459 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "83f17d8f-d2c6-4755-af05-832c7eaa4bcc" (UID: "83f17d8f-d2c6-4755-af05-832c7eaa4bcc"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.304001 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb" (OuterVolumeSpecName: "kube-api-access-cmwzb") pod "83f17d8f-d2c6-4755-af05-832c7eaa4bcc" (UID: "83f17d8f-d2c6-4755-af05-832c7eaa4bcc"). InnerVolumeSpecName "kube-api-access-cmwzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.322637 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83f17d8f-d2c6-4755-af05-832c7eaa4bcc" (UID: "83f17d8f-d2c6-4755-af05-832c7eaa4bcc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.398552 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.398847 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.398923 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmwzb\" (UniqueName: \"kubernetes.io/projected/83f17d8f-d2c6-4755-af05-832c7eaa4bcc-kube-api-access-cmwzb\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.705180 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:48:41 crc kubenswrapper[4876]: E1215 08:48:41.705608 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.835903 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-8r24z" event={"ID":"83f17d8f-d2c6-4755-af05-832c7eaa4bcc","Type":"ContainerDied","Data":"a81c8cc954df9a0b3214df33bf96e55ed52bc908051e9f02732393fe7f408223"} Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.836233 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a81c8cc954df9a0b3214df33bf96e55ed52bc908051e9f02732393fe7f408223" Dec 15 08:48:41 crc kubenswrapper[4876]: I1215 08:48:41.835965 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-8r24z" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.052133 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-c776c4f89-jznbk"] Dec 15 08:48:42 crc kubenswrapper[4876]: E1215 08:48:42.052497 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83f17d8f-d2c6-4755-af05-832c7eaa4bcc" containerName="barbican-db-sync" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.052518 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="83f17d8f-d2c6-4755-af05-832c7eaa4bcc" containerName="barbican-db-sync" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.052677 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="83f17d8f-d2c6-4755-af05-832c7eaa4bcc" containerName="barbican-db-sync" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.053601 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.055671 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.055904 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.058019 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-mnk6s" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.069829 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-c776c4f89-jznbk"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.082220 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-57f9dd55d6-zbz7c"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.083763 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.089334 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.108050 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-57f9dd55d6-zbz7c"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.109765 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-combined-ca-bundle\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110047 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-combined-ca-bundle\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110190 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110347 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data-custom\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110502 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq69d\" (UniqueName: \"kubernetes.io/projected/fc70e06c-5856-4e75-b2d4-8560e7a443d8-kube-api-access-wq69d\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: 
\"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110680 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14199da7-b1e2-4729-8a0c-0b94c7166885-logs\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.110891 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.111027 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc70e06c-5856-4e75-b2d4-8560e7a443d8-logs\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.113259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9j4h\" (UniqueName: \"kubernetes.io/projected/14199da7-b1e2-4729-8a0c-0b94c7166885-kube-api-access-d9j4h\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.113554 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data-custom\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.173508 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.175274 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.187476 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215692 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data-custom\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215777 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-combined-ca-bundle\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215810 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215866 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-combined-ca-bundle\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215894 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215924 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data-custom\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215955 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.215983 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq69d\" (UniqueName: \"kubernetes.io/projected/fc70e06c-5856-4e75-b2d4-8560e7a443d8-kube-api-access-wq69d\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216024 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14199da7-b1e2-4729-8a0c-0b94c7166885-logs\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216061 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftc8l\" (UniqueName: \"kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216118 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216160 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216183 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc70e06c-5856-4e75-b2d4-8560e7a443d8-logs\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216207 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9j4h\" (UniqueName: \"kubernetes.io/projected/14199da7-b1e2-4729-8a0c-0b94c7166885-kube-api-access-d9j4h\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.216234 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.219447 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc70e06c-5856-4e75-b2d4-8560e7a443d8-logs\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.220372 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14199da7-b1e2-4729-8a0c-0b94c7166885-logs\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 
08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.221203 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-combined-ca-bundle\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.221687 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-combined-ca-bundle\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.225081 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data-custom\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.228828 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc70e06c-5856-4e75-b2d4-8560e7a443d8-config-data\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.229369 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data-custom\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.241181 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq69d\" (UniqueName: \"kubernetes.io/projected/fc70e06c-5856-4e75-b2d4-8560e7a443d8-kube-api-access-wq69d\") pod \"barbican-worker-c776c4f89-jznbk\" (UID: \"fc70e06c-5856-4e75-b2d4-8560e7a443d8\") " pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.242349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9j4h\" (UniqueName: \"kubernetes.io/projected/14199da7-b1e2-4729-8a0c-0b94c7166885-kube-api-access-d9j4h\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.260943 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14199da7-b1e2-4729-8a0c-0b94c7166885-config-data\") pod \"barbican-keystone-listener-57f9dd55d6-zbz7c\" (UID: \"14199da7-b1e2-4729-8a0c-0b94c7166885\") " pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.281953 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-844649956b-xbt9v"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.284191 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.293172 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-844649956b-xbt9v"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.295294 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317284 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317332 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317363 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c92d69ad-8051-4a8d-a271-9af1d845ae73-logs\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317416 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2l2j\" (UniqueName: \"kubernetes.io/projected/c92d69ad-8051-4a8d-a271-9af1d845ae73-kube-api-access-v2l2j\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317462 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data-custom\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317516 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-combined-ca-bundle\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " 
pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317570 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.317603 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftc8l\" (UniqueName: \"kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.318287 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.318380 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.318620 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.318804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.334253 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftc8l\" (UniqueName: \"kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l\") pod \"dnsmasq-dns-644987d9dc-k6rdf\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.370410 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-c776c4f89-jznbk" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.407724 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.419550 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.419641 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c92d69ad-8051-4a8d-a271-9af1d845ae73-logs\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.419688 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2l2j\" (UniqueName: \"kubernetes.io/projected/c92d69ad-8051-4a8d-a271-9af1d845ae73-kube-api-access-v2l2j\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.419717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data-custom\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.419753 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-combined-ca-bundle\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.420583 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c92d69ad-8051-4a8d-a271-9af1d845ae73-logs\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.423587 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data-custom\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.426400 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-config-data\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.427425 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c92d69ad-8051-4a8d-a271-9af1d845ae73-combined-ca-bundle\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 
08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.448800 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2l2j\" (UniqueName: \"kubernetes.io/projected/c92d69ad-8051-4a8d-a271-9af1d845ae73-kube-api-access-v2l2j\") pod \"barbican-api-844649956b-xbt9v\" (UID: \"c92d69ad-8051-4a8d-a271-9af1d845ae73\") " pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.496687 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.676797 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.954463 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-57f9dd55d6-zbz7c"] Dec 15 08:48:42 crc kubenswrapper[4876]: I1215 08:48:42.964970 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-c776c4f89-jznbk"] Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.077692 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:48:43 crc kubenswrapper[4876]: W1215 08:48:43.080755 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0476a74d_c469_49d5_8955_522e00ef8a35.slice/crio-1ce227d6cfa253c928c2b58503acd38d1ab5964799895a26d42a025fe2168aa1 WatchSource:0}: Error finding container 1ce227d6cfa253c928c2b58503acd38d1ab5964799895a26d42a025fe2168aa1: Status 404 returned error can't find the container with id 1ce227d6cfa253c928c2b58503acd38d1ab5964799895a26d42a025fe2168aa1 Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.195313 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-844649956b-xbt9v"] Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.858570 4876 generic.go:334] "Generic (PLEG): container finished" podID="0476a74d-c469-49d5-8955-522e00ef8a35" containerID="f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731" exitCode=0 Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.858908 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" event={"ID":"0476a74d-c469-49d5-8955-522e00ef8a35","Type":"ContainerDied","Data":"f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.858934 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" event={"ID":"0476a74d-c469-49d5-8955-522e00ef8a35","Type":"ContainerStarted","Data":"1ce227d6cfa253c928c2b58503acd38d1ab5964799895a26d42a025fe2168aa1"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.861987 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c776c4f89-jznbk" event={"ID":"fc70e06c-5856-4e75-b2d4-8560e7a443d8","Type":"ContainerStarted","Data":"7a7ea1f9244b7a4c96ab8a6f3d532a6c48c54a53705260bae7ee484c8f3e02a5"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.865509 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" event={"ID":"14199da7-b1e2-4729-8a0c-0b94c7166885","Type":"ContainerStarted","Data":"a29aaa15baaabb763a82c86f56ab83a9d86dfbe5097dccd696867364e9417af5"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 
08:48:43.868520 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-844649956b-xbt9v" event={"ID":"c92d69ad-8051-4a8d-a271-9af1d845ae73","Type":"ContainerStarted","Data":"c72a044db643b36dda172f272ca94c28bdeaa3b427ef96c3523dc4bbfce4e0b2"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.868563 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-844649956b-xbt9v" event={"ID":"c92d69ad-8051-4a8d-a271-9af1d845ae73","Type":"ContainerStarted","Data":"5fec09df25fe6e4ea1f1516e192dff17228e692c75e5019f7e3b3c638fc5b670"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.868573 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-844649956b-xbt9v" event={"ID":"c92d69ad-8051-4a8d-a271-9af1d845ae73","Type":"ContainerStarted","Data":"01427c28496ff34cc61582ad27e44cb1b1d5965a1fceb946ab8c01a6e40df086"} Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.868830 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.868890 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:43 crc kubenswrapper[4876]: I1215 08:48:43.915796 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-844649956b-xbt9v" podStartSLOduration=1.9157773809999998 podStartE2EDuration="1.915777381s" podCreationTimestamp="2025-12-15 08:48:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:48:43.907049736 +0000 UTC m=+7049.478192637" watchObservedRunningTime="2025-12-15 08:48:43.915777381 +0000 UTC m=+7049.486920302" Dec 15 08:48:44 crc kubenswrapper[4876]: I1215 08:48:44.877783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" event={"ID":"0476a74d-c469-49d5-8955-522e00ef8a35","Type":"ContainerStarted","Data":"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57"} Dec 15 08:48:44 crc kubenswrapper[4876]: I1215 08:48:44.904751 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" podStartSLOduration=2.904730307 podStartE2EDuration="2.904730307s" podCreationTimestamp="2025-12-15 08:48:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:48:44.898725026 +0000 UTC m=+7050.469867937" watchObservedRunningTime="2025-12-15 08:48:44.904730307 +0000 UTC m=+7050.475873228" Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.887071 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" event={"ID":"14199da7-b1e2-4729-8a0c-0b94c7166885","Type":"ContainerStarted","Data":"f33f984a54d3c25935260717bfb257ef1f5db7e3f5a9474edcb79d8a4d582641"} Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.887681 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" event={"ID":"14199da7-b1e2-4729-8a0c-0b94c7166885","Type":"ContainerStarted","Data":"56070032c7e9c7111513554e42637f405ef538d942a92e13f446049c70404c28"} Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.888830 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c776c4f89-jznbk" 
event={"ID":"fc70e06c-5856-4e75-b2d4-8560e7a443d8","Type":"ContainerStarted","Data":"ff426ce9a221fd83738d9d2d50b209da87ba5e01b2b85ce7c8cbcdf046ab53af"} Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.888924 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-c776c4f89-jznbk" event={"ID":"fc70e06c-5856-4e75-b2d4-8560e7a443d8","Type":"ContainerStarted","Data":"ab2e2e7baa6e2ece39cc6acc1c9ae0274b1b8a63cdbab036b05ba162a7fd4935"} Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.889041 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.906775 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-57f9dd55d6-zbz7c" podStartSLOduration=1.713447725 podStartE2EDuration="3.906751096s" podCreationTimestamp="2025-12-15 08:48:42 +0000 UTC" firstStartedPulling="2025-12-15 08:48:42.971166166 +0000 UTC m=+7048.542309067" lastFinishedPulling="2025-12-15 08:48:45.164469537 +0000 UTC m=+7050.735612438" observedRunningTime="2025-12-15 08:48:45.906074717 +0000 UTC m=+7051.477217628" watchObservedRunningTime="2025-12-15 08:48:45.906751096 +0000 UTC m=+7051.477894007" Dec 15 08:48:45 crc kubenswrapper[4876]: I1215 08:48:45.933839 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-c776c4f89-jznbk" podStartSLOduration=1.754697802 podStartE2EDuration="3.933813783s" podCreationTimestamp="2025-12-15 08:48:42 +0000 UTC" firstStartedPulling="2025-12-15 08:48:42.983465056 +0000 UTC m=+7048.554607957" lastFinishedPulling="2025-12-15 08:48:45.162581027 +0000 UTC m=+7050.733723938" observedRunningTime="2025-12-15 08:48:45.924666386 +0000 UTC m=+7051.495809307" watchObservedRunningTime="2025-12-15 08:48:45.933813783 +0000 UTC m=+7051.504956714" Dec 15 08:48:50 crc kubenswrapper[4876]: I1215 08:48:50.255312 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-844649956b-xbt9v" podUID="c92d69ad-8051-4a8d-a271-9af1d845ae73" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 15 08:48:52 crc kubenswrapper[4876]: I1215 08:48:52.498374 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:48:52 crc kubenswrapper[4876]: I1215 08:48:52.573253 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:48:52 crc kubenswrapper[4876]: I1215 08:48:52.573551 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="dnsmasq-dns" containerID="cri-o://4d108dd06c477c765ed01422582078be839784d5aa3821ace0e1c548d3daa0a1" gracePeriod=10 Dec 15 08:48:52 crc kubenswrapper[4876]: I1215 08:48:52.944359 4876 generic.go:334] "Generic (PLEG): container finished" podID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerID="4d108dd06c477c765ed01422582078be839784d5aa3821ace0e1c548d3daa0a1" exitCode=0 Dec 15 08:48:52 crc kubenswrapper[4876]: I1215 08:48:52.944678 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" event={"ID":"11505a18-69b0-4a6d-bfd6-51d2018d22ef","Type":"ContainerDied","Data":"4d108dd06c477c765ed01422582078be839784d5aa3821ace0e1c548d3daa0a1"} Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 
08:48:53.075534 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.079917 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb\") pod \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.080024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config\") pod \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.080059 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bln6\" (UniqueName: \"kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6\") pod \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.080194 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc\") pod \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.080265 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb\") pod \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\" (UID: \"11505a18-69b0-4a6d-bfd6-51d2018d22ef\") " Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.089854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6" (OuterVolumeSpecName: "kube-api-access-5bln6") pod "11505a18-69b0-4a6d-bfd6-51d2018d22ef" (UID: "11505a18-69b0-4a6d-bfd6-51d2018d22ef"). InnerVolumeSpecName "kube-api-access-5bln6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.140021 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "11505a18-69b0-4a6d-bfd6-51d2018d22ef" (UID: "11505a18-69b0-4a6d-bfd6-51d2018d22ef"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.161063 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "11505a18-69b0-4a6d-bfd6-51d2018d22ef" (UID: "11505a18-69b0-4a6d-bfd6-51d2018d22ef"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.177624 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config" (OuterVolumeSpecName: "config") pod "11505a18-69b0-4a6d-bfd6-51d2018d22ef" (UID: "11505a18-69b0-4a6d-bfd6-51d2018d22ef"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.181855 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.181889 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.181899 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.181909 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bln6\" (UniqueName: \"kubernetes.io/projected/11505a18-69b0-4a6d-bfd6-51d2018d22ef-kube-api-access-5bln6\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.208878 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "11505a18-69b0-4a6d-bfd6-51d2018d22ef" (UID: "11505a18-69b0-4a6d-bfd6-51d2018d22ef"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.283410 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/11505a18-69b0-4a6d-bfd6-51d2018d22ef-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.953214 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" event={"ID":"11505a18-69b0-4a6d-bfd6-51d2018d22ef","Type":"ContainerDied","Data":"54b677e96edf41f4f9d52f61bce04729698023df2146c25f1791b696cfd43bcb"} Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.953264 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7658df59-7fg99" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.953572 4876 scope.go:117] "RemoveContainer" containerID="4d108dd06c477c765ed01422582078be839784d5aa3821ace0e1c548d3daa0a1" Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.996944 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:48:53 crc kubenswrapper[4876]: I1215 08:48:53.997171 4876 scope.go:117] "RemoveContainer" containerID="aaae199571f0f1eb69fa8c3d0282094c45213d0010c2fb88d82a32677136402e" Dec 15 08:48:54 crc kubenswrapper[4876]: I1215 08:48:54.005238 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7658df59-7fg99"] Dec 15 08:48:54 crc kubenswrapper[4876]: I1215 08:48:54.343760 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:54 crc kubenswrapper[4876]: I1215 08:48:54.402855 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-844649956b-xbt9v" Dec 15 08:48:54 crc kubenswrapper[4876]: I1215 08:48:54.716885 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" path="/var/lib/kubelet/pods/11505a18-69b0-4a6d-bfd6-51d2018d22ef/volumes" Dec 15 08:48:55 crc kubenswrapper[4876]: I1215 08:48:55.705917 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:48:55 crc kubenswrapper[4876]: E1215 08:48:55.706723 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.864412 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-qv5gc"] Dec 15 08:49:00 crc kubenswrapper[4876]: E1215 08:49:00.865283 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="dnsmasq-dns" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.865301 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="dnsmasq-dns" Dec 15 08:49:00 crc kubenswrapper[4876]: E1215 08:49:00.865338 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="init" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.865345 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="init" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.865537 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="11505a18-69b0-4a6d-bfd6-51d2018d22ef" containerName="dnsmasq-dns" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.866212 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.873654 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qv5gc"] Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.976544 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e591-account-create-update-skxc6"] Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.977938 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.981457 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 15 08:49:00 crc kubenswrapper[4876]: I1215 08:49:00.990628 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e591-account-create-update-skxc6"] Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.033533 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvkj4\" (UniqueName: \"kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.033654 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.134730 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75kvj\" (UniqueName: \"kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.134800 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.134841 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvkj4\" (UniqueName: \"kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.134927 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.135863 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.160261 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvkj4\" (UniqueName: \"kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4\") pod \"neutron-db-create-qv5gc\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.192182 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.236635 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75kvj\" (UniqueName: \"kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.236692 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.237494 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.259722 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75kvj\" (UniqueName: \"kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj\") pod \"neutron-e591-account-create-update-skxc6\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.301549 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.665459 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-qv5gc"] Dec 15 08:49:01 crc kubenswrapper[4876]: I1215 08:49:01.776582 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e591-account-create-update-skxc6"] Dec 15 08:49:01 crc kubenswrapper[4876]: W1215 08:49:01.783497 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode43b8605_b772_4173_9e18_9b9439fa45e3.slice/crio-5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda WatchSource:0}: Error finding container 5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda: Status 404 returned error can't find the container with id 5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.019538 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e591-account-create-update-skxc6" event={"ID":"e43b8605-b772-4173-9e18-9b9439fa45e3","Type":"ContainerStarted","Data":"33dd809b89f1bfe605f93f05022ff7a891bd1d14a3b297976f6c0aa5d01202cc"} Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.019609 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e591-account-create-update-skxc6" event={"ID":"e43b8605-b772-4173-9e18-9b9439fa45e3","Type":"ContainerStarted","Data":"5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda"} Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.021245 4876 generic.go:334] "Generic (PLEG): container finished" podID="057d3d8d-8e37-4305-82ec-b0e64b327c56" containerID="dc4a820d75ff6692477b255b33ff62ade3561aa2ecbb69442ba67fd6fefcc954" exitCode=0 Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.021330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv5gc" event={"ID":"057d3d8d-8e37-4305-82ec-b0e64b327c56","Type":"ContainerDied","Data":"dc4a820d75ff6692477b255b33ff62ade3561aa2ecbb69442ba67fd6fefcc954"} Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.021381 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv5gc" event={"ID":"057d3d8d-8e37-4305-82ec-b0e64b327c56","Type":"ContainerStarted","Data":"28bfa31dbc8ab2a63eed9c3bf1786b207c8c1ff2a9aaf426b45abf8a34ccd291"} Dec 15 08:49:02 crc kubenswrapper[4876]: I1215 08:49:02.031314 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-e591-account-create-update-skxc6" podStartSLOduration=2.031291538 podStartE2EDuration="2.031291538s" podCreationTimestamp="2025-12-15 08:49:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:49:02.029776328 +0000 UTC m=+7067.600919239" watchObservedRunningTime="2025-12-15 08:49:02.031291538 +0000 UTC m=+7067.602434449" Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.030634 4876 generic.go:334] "Generic (PLEG): container finished" podID="e43b8605-b772-4173-9e18-9b9439fa45e3" containerID="33dd809b89f1bfe605f93f05022ff7a891bd1d14a3b297976f6c0aa5d01202cc" exitCode=0 Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.030736 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e591-account-create-update-skxc6" 
event={"ID":"e43b8605-b772-4173-9e18-9b9439fa45e3","Type":"ContainerDied","Data":"33dd809b89f1bfe605f93f05022ff7a891bd1d14a3b297976f6c0aa5d01202cc"} Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.351941 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.483005 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts\") pod \"057d3d8d-8e37-4305-82ec-b0e64b327c56\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.483142 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvkj4\" (UniqueName: \"kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4\") pod \"057d3d8d-8e37-4305-82ec-b0e64b327c56\" (UID: \"057d3d8d-8e37-4305-82ec-b0e64b327c56\") " Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.484618 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "057d3d8d-8e37-4305-82ec-b0e64b327c56" (UID: "057d3d8d-8e37-4305-82ec-b0e64b327c56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.488805 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4" (OuterVolumeSpecName: "kube-api-access-cvkj4") pod "057d3d8d-8e37-4305-82ec-b0e64b327c56" (UID: "057d3d8d-8e37-4305-82ec-b0e64b327c56"). InnerVolumeSpecName "kube-api-access-cvkj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.585375 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/057d3d8d-8e37-4305-82ec-b0e64b327c56-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:03 crc kubenswrapper[4876]: I1215 08:49:03.585666 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvkj4\" (UniqueName: \"kubernetes.io/projected/057d3d8d-8e37-4305-82ec-b0e64b327c56-kube-api-access-cvkj4\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.042366 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-qv5gc" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.046299 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-qv5gc" event={"ID":"057d3d8d-8e37-4305-82ec-b0e64b327c56","Type":"ContainerDied","Data":"28bfa31dbc8ab2a63eed9c3bf1786b207c8c1ff2a9aaf426b45abf8a34ccd291"} Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.046448 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28bfa31dbc8ab2a63eed9c3bf1786b207c8c1ff2a9aaf426b45abf8a34ccd291" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.347016 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.504297 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75kvj\" (UniqueName: \"kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj\") pod \"e43b8605-b772-4173-9e18-9b9439fa45e3\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.504391 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts\") pod \"e43b8605-b772-4173-9e18-9b9439fa45e3\" (UID: \"e43b8605-b772-4173-9e18-9b9439fa45e3\") " Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.505001 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e43b8605-b772-4173-9e18-9b9439fa45e3" (UID: "e43b8605-b772-4173-9e18-9b9439fa45e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.507831 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj" (OuterVolumeSpecName: "kube-api-access-75kvj") pod "e43b8605-b772-4173-9e18-9b9439fa45e3" (UID: "e43b8605-b772-4173-9e18-9b9439fa45e3"). InnerVolumeSpecName "kube-api-access-75kvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.606217 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75kvj\" (UniqueName: \"kubernetes.io/projected/e43b8605-b772-4173-9e18-9b9439fa45e3-kube-api-access-75kvj\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:04 crc kubenswrapper[4876]: I1215 08:49:04.606534 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e43b8605-b772-4173-9e18-9b9439fa45e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:05 crc kubenswrapper[4876]: I1215 08:49:05.053284 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e591-account-create-update-skxc6" event={"ID":"e43b8605-b772-4173-9e18-9b9439fa45e3","Type":"ContainerDied","Data":"5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda"} Dec 15 08:49:05 crc kubenswrapper[4876]: I1215 08:49:05.053334 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f9475f3d23bc37151d26017e4fff48f33a90a1f522e121814ed7dd4d14b2bda" Dec 15 08:49:05 crc kubenswrapper[4876]: I1215 08:49:05.053343 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e591-account-create-update-skxc6" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.314418 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-pf7pp"] Dec 15 08:49:06 crc kubenswrapper[4876]: E1215 08:49:06.315171 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43b8605-b772-4173-9e18-9b9439fa45e3" containerName="mariadb-account-create-update" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.315202 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43b8605-b772-4173-9e18-9b9439fa45e3" containerName="mariadb-account-create-update" Dec 15 08:49:06 crc kubenswrapper[4876]: E1215 08:49:06.315218 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="057d3d8d-8e37-4305-82ec-b0e64b327c56" containerName="mariadb-database-create" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.315223 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="057d3d8d-8e37-4305-82ec-b0e64b327c56" containerName="mariadb-database-create" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.315422 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="057d3d8d-8e37-4305-82ec-b0e64b327c56" containerName="mariadb-database-create" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.315453 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43b8605-b772-4173-9e18-9b9439fa45e3" containerName="mariadb-account-create-update" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.316018 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.318388 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.318448 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.318448 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pnv7h" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.322806 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pf7pp"] Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.441504 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktff6\" (UniqueName: \"kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.441618 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.441690 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc 
kubenswrapper[4876]: I1215 08:49:06.543481 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.543591 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.543654 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktff6\" (UniqueName: \"kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.550080 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.559751 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.572761 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktff6\" (UniqueName: \"kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6\") pod \"neutron-db-sync-pf7pp\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:06 crc kubenswrapper[4876]: I1215 08:49:06.636391 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:07 crc kubenswrapper[4876]: I1215 08:49:07.116536 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pf7pp"] Dec 15 08:49:07 crc kubenswrapper[4876]: I1215 08:49:07.706158 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:49:07 crc kubenswrapper[4876]: E1215 08:49:07.706772 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:08 crc kubenswrapper[4876]: I1215 08:49:08.088598 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pf7pp" event={"ID":"3298a876-f36d-4d28-8203-beec177942e8","Type":"ContainerStarted","Data":"dcd679b38ab4d7a47a0d07234daa4da9c877934d55720fe9f80c2c80e06bdafe"} Dec 15 08:49:08 crc kubenswrapper[4876]: I1215 08:49:08.088646 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pf7pp" event={"ID":"3298a876-f36d-4d28-8203-beec177942e8","Type":"ContainerStarted","Data":"8e16dc72170414bdd0c1234e02ee8ee4108c5b044aadf1dd4512a3ed73672b33"} Dec 15 08:49:08 crc kubenswrapper[4876]: I1215 08:49:08.107674 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-pf7pp" podStartSLOduration=2.107657742 podStartE2EDuration="2.107657742s" podCreationTimestamp="2025-12-15 08:49:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:49:08.103947422 +0000 UTC m=+7073.675090333" watchObservedRunningTime="2025-12-15 08:49:08.107657742 +0000 UTC m=+7073.678800653" Dec 15 08:49:12 crc kubenswrapper[4876]: I1215 08:49:12.119248 4876 generic.go:334] "Generic (PLEG): container finished" podID="3298a876-f36d-4d28-8203-beec177942e8" containerID="dcd679b38ab4d7a47a0d07234daa4da9c877934d55720fe9f80c2c80e06bdafe" exitCode=0 Dec 15 08:49:12 crc kubenswrapper[4876]: I1215 08:49:12.120799 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pf7pp" event={"ID":"3298a876-f36d-4d28-8203-beec177942e8","Type":"ContainerDied","Data":"dcd679b38ab4d7a47a0d07234daa4da9c877934d55720fe9f80c2c80e06bdafe"} Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.455762 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.494570 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config\") pod \"3298a876-f36d-4d28-8203-beec177942e8\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.494702 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktff6\" (UniqueName: \"kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6\") pod \"3298a876-f36d-4d28-8203-beec177942e8\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.494760 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle\") pod \"3298a876-f36d-4d28-8203-beec177942e8\" (UID: \"3298a876-f36d-4d28-8203-beec177942e8\") " Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.500286 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6" (OuterVolumeSpecName: "kube-api-access-ktff6") pod "3298a876-f36d-4d28-8203-beec177942e8" (UID: "3298a876-f36d-4d28-8203-beec177942e8"). InnerVolumeSpecName "kube-api-access-ktff6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.525681 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config" (OuterVolumeSpecName: "config") pod "3298a876-f36d-4d28-8203-beec177942e8" (UID: "3298a876-f36d-4d28-8203-beec177942e8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.529493 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3298a876-f36d-4d28-8203-beec177942e8" (UID: "3298a876-f36d-4d28-8203-beec177942e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.596620 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.596663 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktff6\" (UniqueName: \"kubernetes.io/projected/3298a876-f36d-4d28-8203-beec177942e8-kube-api-access-ktff6\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:13 crc kubenswrapper[4876]: I1215 08:49:13.596676 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3298a876-f36d-4d28-8203-beec177942e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.138828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pf7pp" event={"ID":"3298a876-f36d-4d28-8203-beec177942e8","Type":"ContainerDied","Data":"8e16dc72170414bdd0c1234e02ee8ee4108c5b044aadf1dd4512a3ed73672b33"} Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.138868 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e16dc72170414bdd0c1234e02ee8ee4108c5b044aadf1dd4512a3ed73672b33" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.138925 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pf7pp" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.375038 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:49:14 crc kubenswrapper[4876]: E1215 08:49:14.375497 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3298a876-f36d-4d28-8203-beec177942e8" containerName="neutron-db-sync" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.375522 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3298a876-f36d-4d28-8203-beec177942e8" containerName="neutron-db-sync" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.375726 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3298a876-f36d-4d28-8203-beec177942e8" containerName="neutron-db-sync" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.376895 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.407380 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxrcj\" (UniqueName: \"kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.407743 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.407770 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.407827 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.407883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.421796 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.508525 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.508615 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxrcj\" (UniqueName: \"kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.508674 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.508692 4876 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.509169 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.509645 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.509987 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.510177 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.510707 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.538903 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxrcj\" (UniqueName: \"kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj\") pod \"dnsmasq-dns-645f7b94c5-nn8px\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.657862 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6799547699-m5p7w"] Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.659585 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.663762 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pnv7h" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.663928 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.664083 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.670691 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6799547699-m5p7w"] Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.699201 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.711893 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsqqz\" (UniqueName: \"kubernetes.io/projected/cdafcf42-6822-412c-8511-d6b45c9e6a62-kube-api-access-hsqqz\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.711992 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-combined-ca-bundle\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.712048 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.712125 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-httpd-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.813311 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.813639 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-httpd-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.813774 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsqqz\" (UniqueName: \"kubernetes.io/projected/cdafcf42-6822-412c-8511-d6b45c9e6a62-kube-api-access-hsqqz\") pod \"neutron-6799547699-m5p7w\" (UID: 
\"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.816268 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-combined-ca-bundle\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.817008 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.817337 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.834398 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.840177 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-combined-ca-bundle\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.840804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cdafcf42-6822-412c-8511-d6b45c9e6a62-httpd-config\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:14 crc kubenswrapper[4876]: I1215 08:49:14.845185 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsqqz\" (UniqueName: \"kubernetes.io/projected/cdafcf42-6822-412c-8511-d6b45c9e6a62-kube-api-access-hsqqz\") pod \"neutron-6799547699-m5p7w\" (UID: \"cdafcf42-6822-412c-8511-d6b45c9e6a62\") " pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:15 crc kubenswrapper[4876]: I1215 08:49:15.001282 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-pnv7h" Dec 15 08:49:15 crc kubenswrapper[4876]: I1215 08:49:15.009441 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:15 crc kubenswrapper[4876]: I1215 08:49:15.189621 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:49:15 crc kubenswrapper[4876]: I1215 08:49:15.587717 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6799547699-m5p7w"] Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.156127 4876 generic.go:334] "Generic (PLEG): container finished" podID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerID="996e96fcefcb5e5deee4331b158e43d691875b65813ccff62caf654f2a22adf1" exitCode=0 Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.156216 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" event={"ID":"8f5586db-a167-4c4a-b009-bbd8a0d3b13d","Type":"ContainerDied","Data":"996e96fcefcb5e5deee4331b158e43d691875b65813ccff62caf654f2a22adf1"} Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.156483 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" event={"ID":"8f5586db-a167-4c4a-b009-bbd8a0d3b13d","Type":"ContainerStarted","Data":"183ff1f5b8c013f7a9d6308c619245da275d421de78f6e8889f9dd067d8b2924"} Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.158288 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6799547699-m5p7w" event={"ID":"cdafcf42-6822-412c-8511-d6b45c9e6a62","Type":"ContainerStarted","Data":"ed472882cbf2d6718ecade94def62d37e10b932f7b424d3b106ed9eab88f6a61"} Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.158328 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6799547699-m5p7w" event={"ID":"cdafcf42-6822-412c-8511-d6b45c9e6a62","Type":"ContainerStarted","Data":"fb4d86be1a5e4579060d254f321672382397672e6baa224a246b564aac095fbb"} Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.158338 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6799547699-m5p7w" event={"ID":"cdafcf42-6822-412c-8511-d6b45c9e6a62","Type":"ContainerStarted","Data":"873b92c930c6bd2e7b0f389f96296a1e6cb3ba542a16c258f8299b0d31c6a679"} Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.158612 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:16 crc kubenswrapper[4876]: I1215 08:49:16.200000 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6799547699-m5p7w" podStartSLOduration=2.19997675 podStartE2EDuration="2.19997675s" podCreationTimestamp="2025-12-15 08:49:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:49:16.197460433 +0000 UTC m=+7081.768603364" watchObservedRunningTime="2025-12-15 08:49:16.19997675 +0000 UTC m=+7081.771119661" Dec 15 08:49:17 crc kubenswrapper[4876]: I1215 08:49:17.170942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" event={"ID":"8f5586db-a167-4c4a-b009-bbd8a0d3b13d","Type":"ContainerStarted","Data":"df1a0f71cc96e015f0804100e51ce582a52340415e6e9dd5e4c6bad83bda408a"} Dec 15 08:49:17 crc kubenswrapper[4876]: I1215 08:49:17.189180 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" podStartSLOduration=3.189164763 podStartE2EDuration="3.189164763s" podCreationTimestamp="2025-12-15 08:49:14 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:49:17.187948591 +0000 UTC m=+7082.759091502" watchObservedRunningTime="2025-12-15 08:49:17.189164763 +0000 UTC m=+7082.760307684" Dec 15 08:49:18 crc kubenswrapper[4876]: I1215 08:49:18.179854 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:18 crc kubenswrapper[4876]: I1215 08:49:18.706824 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:49:18 crc kubenswrapper[4876]: E1215 08:49:18.707004 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:24 crc kubenswrapper[4876]: I1215 08:49:24.702294 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:49:24 crc kubenswrapper[4876]: I1215 08:49:24.757645 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:49:24 crc kubenswrapper[4876]: I1215 08:49:24.757885 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="dnsmasq-dns" containerID="cri-o://e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57" gracePeriod=10 Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.226851 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.253836 4876 generic.go:334] "Generic (PLEG): container finished" podID="0476a74d-c469-49d5-8955-522e00ef8a35" containerID="e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57" exitCode=0 Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.253854 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.253870 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" event={"ID":"0476a74d-c469-49d5-8955-522e00ef8a35","Type":"ContainerDied","Data":"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57"} Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.253934 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644987d9dc-k6rdf" event={"ID":"0476a74d-c469-49d5-8955-522e00ef8a35","Type":"ContainerDied","Data":"1ce227d6cfa253c928c2b58503acd38d1ab5964799895a26d42a025fe2168aa1"} Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.253958 4876 scope.go:117] "RemoveContainer" containerID="e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.279222 4876 scope.go:117] "RemoveContainer" containerID="f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.323344 4876 scope.go:117] "RemoveContainer" containerID="e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57" Dec 15 08:49:25 crc kubenswrapper[4876]: E1215 08:49:25.323821 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57\": container with ID starting with e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57 not found: ID does not exist" containerID="e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.323858 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57"} err="failed to get container status \"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57\": rpc error: code = NotFound desc = could not find container \"e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57\": container with ID starting with e72625e957548f3727fd766db6ae2aae7742532e0000eb8c431a6d362406ca57 not found: ID does not exist" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.323890 4876 scope.go:117] "RemoveContainer" containerID="f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731" Dec 15 08:49:25 crc kubenswrapper[4876]: E1215 08:49:25.324578 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731\": container with ID starting with f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731 not found: ID does not exist" containerID="f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.324633 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731"} err="failed to get container status \"f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731\": rpc error: code = NotFound desc = could not find container \"f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731\": container with ID starting with f4a7975689638ff47568458adfef34107b4dac45677d8a79536938b03159e731 not found: ID does not exist" Dec 15 08:49:25 crc 
kubenswrapper[4876]: I1215 08:49:25.423321 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb\") pod \"0476a74d-c469-49d5-8955-522e00ef8a35\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.423496 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config\") pod \"0476a74d-c469-49d5-8955-522e00ef8a35\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.423667 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb\") pod \"0476a74d-c469-49d5-8955-522e00ef8a35\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.423830 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc\") pod \"0476a74d-c469-49d5-8955-522e00ef8a35\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.423932 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftc8l\" (UniqueName: \"kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l\") pod \"0476a74d-c469-49d5-8955-522e00ef8a35\" (UID: \"0476a74d-c469-49d5-8955-522e00ef8a35\") " Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.429888 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l" (OuterVolumeSpecName: "kube-api-access-ftc8l") pod "0476a74d-c469-49d5-8955-522e00ef8a35" (UID: "0476a74d-c469-49d5-8955-522e00ef8a35"). InnerVolumeSpecName "kube-api-access-ftc8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.464639 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0476a74d-c469-49d5-8955-522e00ef8a35" (UID: "0476a74d-c469-49d5-8955-522e00ef8a35"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.464651 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0476a74d-c469-49d5-8955-522e00ef8a35" (UID: "0476a74d-c469-49d5-8955-522e00ef8a35"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.467677 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0476a74d-c469-49d5-8955-522e00ef8a35" (UID: "0476a74d-c469-49d5-8955-522e00ef8a35"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.469747 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config" (OuterVolumeSpecName: "config") pod "0476a74d-c469-49d5-8955-522e00ef8a35" (UID: "0476a74d-c469-49d5-8955-522e00ef8a35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.528775 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.528852 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.528865 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.528876 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftc8l\" (UniqueName: \"kubernetes.io/projected/0476a74d-c469-49d5-8955-522e00ef8a35-kube-api-access-ftc8l\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.528915 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0476a74d-c469-49d5-8955-522e00ef8a35-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.583893 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:49:25 crc kubenswrapper[4876]: I1215 08:49:25.602474 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-644987d9dc-k6rdf"] Dec 15 08:49:26 crc kubenswrapper[4876]: I1215 08:49:26.715877 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" path="/var/lib/kubelet/pods/0476a74d-c469-49d5-8955-522e00ef8a35/volumes" Dec 15 08:49:29 crc kubenswrapper[4876]: I1215 08:49:29.705219 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:49:29 crc kubenswrapper[4876]: E1215 08:49:29.705898 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:43 crc kubenswrapper[4876]: I1215 08:49:43.706361 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:49:43 crc kubenswrapper[4876]: E1215 08:49:43.707363 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:45 crc kubenswrapper[4876]: I1215 08:49:45.020495 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6799547699-m5p7w" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.735050 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nvqqs"] Dec 15 08:49:51 crc kubenswrapper[4876]: E1215 08:49:51.736017 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="init" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.736075 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="init" Dec 15 08:49:51 crc kubenswrapper[4876]: E1215 08:49:51.736121 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="dnsmasq-dns" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.736131 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="dnsmasq-dns" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.736310 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="0476a74d-c469-49d5-8955-522e00ef8a35" containerName="dnsmasq-dns" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.736900 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.747285 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nvqqs"] Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.800451 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.800643 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7rrj\" (UniqueName: \"kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.854587 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-34c5-account-create-update-vs5gb"] Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.855917 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.858151 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.873418 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-34c5-account-create-update-vs5gb"] Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.902294 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7rrj\" (UniqueName: \"kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.902362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts\") pod \"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.902418 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.902458 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npnsg\" (UniqueName: \"kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg\") pod \"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.903381 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:51 crc kubenswrapper[4876]: I1215 08:49:51.925008 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7rrj\" (UniqueName: \"kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj\") pod \"glance-db-create-nvqqs\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.004073 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts\") pod \"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.004221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npnsg\" (UniqueName: \"kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg\") pod 
\"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.004968 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts\") pod \"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.037602 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npnsg\" (UniqueName: \"kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg\") pod \"glance-34c5-account-create-update-vs5gb\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.058606 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.171952 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.479326 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nvqqs"] Dec 15 08:49:52 crc kubenswrapper[4876]: W1215 08:49:52.491575 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb7c283e_6fad_4c7f_bfb7_703e63bf9be4.slice/crio-99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd WatchSource:0}: Error finding container 99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd: Status 404 returned error can't find the container with id 99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd Dec 15 08:49:52 crc kubenswrapper[4876]: I1215 08:49:52.598338 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-34c5-account-create-update-vs5gb"] Dec 15 08:49:52 crc kubenswrapper[4876]: W1215 08:49:52.601039 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod687b9ec8_76f9_4e67_9dc6_f55efb50f9f6.slice/crio-baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73 WatchSource:0}: Error finding container baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73: Status 404 returned error can't find the container with id baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73 Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.488973 4876 generic.go:334] "Generic (PLEG): container finished" podID="eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" containerID="fb4cbc4feb22b9cfedbf8e695e30e35e84746172a048ac6a5fb3eef7548a4c90" exitCode=0 Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.489060 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nvqqs" event={"ID":"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4","Type":"ContainerDied","Data":"fb4cbc4feb22b9cfedbf8e695e30e35e84746172a048ac6a5fb3eef7548a4c90"} Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.489393 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nvqqs" 
event={"ID":"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4","Type":"ContainerStarted","Data":"99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd"} Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.491738 4876 generic.go:334] "Generic (PLEG): container finished" podID="687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" containerID="349ee55e5f079000e47bcfce05e88385252e256308f374f6d22eff79cc789ae6" exitCode=0 Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.491777 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-34c5-account-create-update-vs5gb" event={"ID":"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6","Type":"ContainerDied","Data":"349ee55e5f079000e47bcfce05e88385252e256308f374f6d22eff79cc789ae6"} Dec 15 08:49:53 crc kubenswrapper[4876]: I1215 08:49:53.491814 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-34c5-account-create-update-vs5gb" event={"ID":"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6","Type":"ContainerStarted","Data":"baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73"} Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.872986 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.880626 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.946435 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts\") pod \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.946513 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npnsg\" (UniqueName: \"kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg\") pod \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.946564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts\") pod \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\" (UID: \"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6\") " Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.946609 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7rrj\" (UniqueName: \"kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj\") pod \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\" (UID: \"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4\") " Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.946965 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" (UID: "eb7c283e-6fad-4c7f-bfb7-703e63bf9be4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.947140 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" (UID: "687b9ec8-76f9-4e67-9dc6-f55efb50f9f6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.956357 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj" (OuterVolumeSpecName: "kube-api-access-l7rrj") pod "eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" (UID: "eb7c283e-6fad-4c7f-bfb7-703e63bf9be4"). InnerVolumeSpecName "kube-api-access-l7rrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:54 crc kubenswrapper[4876]: I1215 08:49:54.956442 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg" (OuterVolumeSpecName: "kube-api-access-npnsg") pod "687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" (UID: "687b9ec8-76f9-4e67-9dc6-f55efb50f9f6"). InnerVolumeSpecName "kube-api-access-npnsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.048650 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npnsg\" (UniqueName: \"kubernetes.io/projected/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-kube-api-access-npnsg\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.048694 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.048707 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7rrj\" (UniqueName: \"kubernetes.io/projected/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-kube-api-access-l7rrj\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.048722 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.512068 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-34c5-account-create-update-vs5gb" event={"ID":"687b9ec8-76f9-4e67-9dc6-f55efb50f9f6","Type":"ContainerDied","Data":"baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73"} Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.512161 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baed815da469ee71e3c3baf0173fc8fd2d45f8e26217b8be310fb975791d5b73" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.512261 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-34c5-account-create-update-vs5gb" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.515436 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nvqqs" event={"ID":"eb7c283e-6fad-4c7f-bfb7-703e63bf9be4","Type":"ContainerDied","Data":"99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd"} Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.515486 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99179e6d5300de2dc24c5390c92ccd138a90315fd7f7eccca8f4e7f76e36e4fd" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.515562 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nvqqs" Dec 15 08:49:55 crc kubenswrapper[4876]: I1215 08:49:55.705805 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:49:55 crc kubenswrapper[4876]: E1215 08:49:55.706087 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:49:56 crc kubenswrapper[4876]: I1215 08:49:56.998438 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-qd7sv"] Dec 15 08:49:56 crc kubenswrapper[4876]: E1215 08:49:56.999695 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" containerName="mariadb-account-create-update" Dec 15 08:49:56 crc kubenswrapper[4876]: I1215 08:49:56.999723 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" containerName="mariadb-account-create-update" Dec 15 08:49:56 crc kubenswrapper[4876]: E1215 08:49:56.999746 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" containerName="mariadb-database-create" Dec 15 08:49:56 crc kubenswrapper[4876]: I1215 08:49:56.999757 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" containerName="mariadb-database-create" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:56.999962 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" containerName="mariadb-database-create" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:56.999989 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" containerName="mariadb-account-create-update" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.000584 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.002347 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tb9rg" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.003376 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.018333 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qd7sv"] Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.181515 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.181597 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.181862 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.181896 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j68d\" (UniqueName: \"kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.283459 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.283516 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j68d\" (UniqueName: \"kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.283624 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.283662 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data\") pod 
\"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.289835 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.302194 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.304647 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.317651 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j68d\" (UniqueName: \"kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d\") pod \"glance-db-sync-qd7sv\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:57 crc kubenswrapper[4876]: I1215 08:49:57.616651 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qd7sv" Dec 15 08:49:58 crc kubenswrapper[4876]: I1215 08:49:58.225486 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-qd7sv"] Dec 15 08:49:58 crc kubenswrapper[4876]: I1215 08:49:58.546782 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qd7sv" event={"ID":"27a2a74e-1254-40a9-bd85-a381999b2a8c","Type":"ContainerStarted","Data":"adec62484146c98bbb5d2d1bb125cc3a8f948e56b68a0c8c9e561bf8655ff4af"} Dec 15 08:50:10 crc kubenswrapper[4876]: I1215 08:50:10.706528 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:50:10 crc kubenswrapper[4876]: E1215 08:50:10.708150 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:50:15 crc kubenswrapper[4876]: I1215 08:50:15.689959 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qd7sv" event={"ID":"27a2a74e-1254-40a9-bd85-a381999b2a8c","Type":"ContainerStarted","Data":"1373d38b5c2b523d86836d0af2a2fc4b453a51e1d6915834ddd83a4b61fc6746"} Dec 15 08:50:15 crc kubenswrapper[4876]: I1215 08:50:15.712743 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-qd7sv" podStartSLOduration=2.952097243 podStartE2EDuration="19.71272739s" podCreationTimestamp="2025-12-15 08:49:56 +0000 UTC" firstStartedPulling="2025-12-15 08:49:58.228908529 
+0000 UTC m=+7123.800051440" lastFinishedPulling="2025-12-15 08:50:14.989538676 +0000 UTC m=+7140.560681587" observedRunningTime="2025-12-15 08:50:15.709637847 +0000 UTC m=+7141.280780778" watchObservedRunningTime="2025-12-15 08:50:15.71272739 +0000 UTC m=+7141.283870301" Dec 15 08:50:18 crc kubenswrapper[4876]: I1215 08:50:18.715009 4876 generic.go:334] "Generic (PLEG): container finished" podID="27a2a74e-1254-40a9-bd85-a381999b2a8c" containerID="1373d38b5c2b523d86836d0af2a2fc4b453a51e1d6915834ddd83a4b61fc6746" exitCode=0 Dec 15 08:50:18 crc kubenswrapper[4876]: I1215 08:50:18.715489 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qd7sv" event={"ID":"27a2a74e-1254-40a9-bd85-a381999b2a8c","Type":"ContainerDied","Data":"1373d38b5c2b523d86836d0af2a2fc4b453a51e1d6915834ddd83a4b61fc6746"} Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.080601 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-qd7sv" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.199113 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data\") pod \"27a2a74e-1254-40a9-bd85-a381999b2a8c\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.199157 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data\") pod \"27a2a74e-1254-40a9-bd85-a381999b2a8c\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.199203 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle\") pod \"27a2a74e-1254-40a9-bd85-a381999b2a8c\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.199230 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j68d\" (UniqueName: \"kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d\") pod \"27a2a74e-1254-40a9-bd85-a381999b2a8c\" (UID: \"27a2a74e-1254-40a9-bd85-a381999b2a8c\") " Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.204004 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "27a2a74e-1254-40a9-bd85-a381999b2a8c" (UID: "27a2a74e-1254-40a9-bd85-a381999b2a8c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.205337 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d" (OuterVolumeSpecName: "kube-api-access-6j68d") pod "27a2a74e-1254-40a9-bd85-a381999b2a8c" (UID: "27a2a74e-1254-40a9-bd85-a381999b2a8c"). InnerVolumeSpecName "kube-api-access-6j68d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.220540 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27a2a74e-1254-40a9-bd85-a381999b2a8c" (UID: "27a2a74e-1254-40a9-bd85-a381999b2a8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.240597 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data" (OuterVolumeSpecName: "config-data") pod "27a2a74e-1254-40a9-bd85-a381999b2a8c" (UID: "27a2a74e-1254-40a9-bd85-a381999b2a8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.300784 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.300830 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j68d\" (UniqueName: \"kubernetes.io/projected/27a2a74e-1254-40a9-bd85-a381999b2a8c-kube-api-access-6j68d\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.300842 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.300854 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/27a2a74e-1254-40a9-bd85-a381999b2a8c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.732939 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-qd7sv" event={"ID":"27a2a74e-1254-40a9-bd85-a381999b2a8c","Type":"ContainerDied","Data":"adec62484146c98bbb5d2d1bb125cc3a8f948e56b68a0c8c9e561bf8655ff4af"} Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.732984 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-qd7sv" Dec 15 08:50:20 crc kubenswrapper[4876]: I1215 08:50:20.732989 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adec62484146c98bbb5d2d1bb125cc3a8f948e56b68a0c8c9e561bf8655ff4af" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.006171 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:21 crc kubenswrapper[4876]: E1215 08:50:21.006883 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27a2a74e-1254-40a9-bd85-a381999b2a8c" containerName="glance-db-sync" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.006905 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="27a2a74e-1254-40a9-bd85-a381999b2a8c" containerName="glance-db-sync" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.007153 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="27a2a74e-1254-40a9-bd85-a381999b2a8c" containerName="glance-db-sync" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.008238 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.010306 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.014747 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.014762 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.015443 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tb9rg" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.053043 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112476 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112538 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112562 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cgqr\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112594 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112617 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112643 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.112671 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.137979 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.139483 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.153318 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.205405 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.206775 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.210007 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.213635 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.213858 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.213961 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cgqr\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.214082 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.214230 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.214342 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.214448 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.219456 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.219842 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.219610 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.222938 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.226705 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.227300 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.239603 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.241613 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cgqr\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr\") pod \"glance-default-external-api-0\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316086 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316203 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316235 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316262 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316290 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jrg6\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316312 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316333 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316501 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316637 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nptqw\" (UniqueName: \"kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316665 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316691 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.316806 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.329859 4876 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418052 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418129 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nptqw\" (UniqueName: \"kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418152 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418168 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418203 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418242 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418291 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418310 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418328 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc 
kubenswrapper[4876]: I1215 08:50:21.418343 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jrg6\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418356 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418370 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.418799 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.419600 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.420500 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.423388 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.424661 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.427273 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.445648 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.451438 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.452767 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.457663 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.462557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jrg6\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6\") pod \"glance-default-internal-api-0\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.461880 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nptqw\" (UniqueName: \"kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw\") pod \"dnsmasq-dns-84d576d867-gs2tx\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.598643 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.706908 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:50:21 crc kubenswrapper[4876]: E1215 08:50:21.707174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.760700 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:21 crc kubenswrapper[4876]: I1215 08:50:21.872687 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:21 crc kubenswrapper[4876]: W1215 08:50:21.879670 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18692bd9_e543_46e3_95e3_88a0fae79370.slice/crio-402ae1cf749943e8ed27d019a788da162c645a1eb3eb917a417665cba4a9ef43 WatchSource:0}: Error finding container 402ae1cf749943e8ed27d019a788da162c645a1eb3eb917a417665cba4a9ef43: Status 404 returned error can't find the container with id 402ae1cf749943e8ed27d019a788da162c645a1eb3eb917a417665cba4a9ef43 Dec 15 08:50:22 crc kubenswrapper[4876]: I1215 08:50:22.075789 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:22 crc kubenswrapper[4876]: I1215 08:50:22.145075 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:22 crc kubenswrapper[4876]: W1215 08:50:22.147440 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc12f433_00b5_4cbd_9961_d6a4659b2444.slice/crio-12e23ae2a7ec9e8cc8311351fb699da5f9872fdbc9d1a0639491cf08838d79be WatchSource:0}: Error finding container 12e23ae2a7ec9e8cc8311351fb699da5f9872fdbc9d1a0639491cf08838d79be: Status 404 returned error can't find the container with id 12e23ae2a7ec9e8cc8311351fb699da5f9872fdbc9d1a0639491cf08838d79be Dec 15 08:50:22 crc kubenswrapper[4876]: I1215 08:50:22.296233 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:50:22 crc kubenswrapper[4876]: I1215 08:50:22.884860 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerStarted","Data":"402ae1cf749943e8ed27d019a788da162c645a1eb3eb917a417665cba4a9ef43"} Dec 15 08:50:22 crc kubenswrapper[4876]: I1215 08:50:22.894015 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerStarted","Data":"12e23ae2a7ec9e8cc8311351fb699da5f9872fdbc9d1a0639491cf08838d79be"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.904522 4876 generic.go:334] "Generic (PLEG): container finished" podID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerID="78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9" exitCode=0 Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.904587 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" event={"ID":"401c87b1-11fe-4f5f-906c-19ca84ae348f","Type":"ContainerDied","Data":"78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.904943 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" event={"ID":"401c87b1-11fe-4f5f-906c-19ca84ae348f","Type":"ContainerStarted","Data":"3341584bad5ec0cda9db2a4e69989ff97f0c3766aa2938aef4fae3bebd07bd96"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.907209 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerStarted","Data":"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.907258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerStarted","Data":"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.907248 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-log" containerID="cri-o://6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" gracePeriod=30 Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.907280 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-httpd" containerID="cri-o://efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" gracePeriod=30 Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.911508 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerStarted","Data":"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.911805 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerStarted","Data":"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba"} Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.976258 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.97624057 podStartE2EDuration="2.97624057s" podCreationTimestamp="2025-12-15 08:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:50:23.972743436 +0000 UTC m=+7149.543886347" watchObservedRunningTime="2025-12-15 08:50:23.97624057 +0000 UTC m=+7149.547383481" Dec 15 08:50:23 crc kubenswrapper[4876]: I1215 08:50:23.998340 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.998288283 podStartE2EDuration="3.998288283s" podCreationTimestamp="2025-12-15 08:50:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:50:23.997800129 +0000 UTC m=+7149.568943040" watchObservedRunningTime="2025-12-15 08:50:23.998288283 +0000 UTC m=+7149.569431194" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.391433 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.552932 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.616001 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617147 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cgqr\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617183 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617261 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617682 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617764 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617793 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle\") pod \"18692bd9-e543-46e3-95e3-88a0fae79370\" (UID: \"18692bd9-e543-46e3-95e3-88a0fae79370\") " Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.617850 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs" (OuterVolumeSpecName: "logs") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.618156 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.618502 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.618519 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18692bd9-e543-46e3-95e3-88a0fae79370-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.620483 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph" (OuterVolumeSpecName: "ceph") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.620605 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts" (OuterVolumeSpecName: "scripts") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.620680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr" (OuterVolumeSpecName: "kube-api-access-2cgqr") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "kube-api-access-2cgqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.645282 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.663185 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data" (OuterVolumeSpecName: "config-data") pod "18692bd9-e543-46e3-95e3-88a0fae79370" (UID: "18692bd9-e543-46e3-95e3-88a0fae79370"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.719856 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cgqr\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-kube-api-access-2cgqr\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.719892 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.719933 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.719944 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18692bd9-e543-46e3-95e3-88a0fae79370-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.719952 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/18692bd9-e543-46e3-95e3-88a0fae79370-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.921192 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" event={"ID":"401c87b1-11fe-4f5f-906c-19ca84ae348f","Type":"ContainerStarted","Data":"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331"} Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.921322 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923642 4876 generic.go:334] "Generic (PLEG): container finished" podID="18692bd9-e543-46e3-95e3-88a0fae79370" containerID="efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" exitCode=0 Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923676 4876 generic.go:334] "Generic (PLEG): container finished" podID="18692bd9-e543-46e3-95e3-88a0fae79370" containerID="6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" exitCode=143 Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923702 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerDied","Data":"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c"} Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923749 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerDied","Data":"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d"} Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923762 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"18692bd9-e543-46e3-95e3-88a0fae79370","Type":"ContainerDied","Data":"402ae1cf749943e8ed27d019a788da162c645a1eb3eb917a417665cba4a9ef43"} Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923787 4876 scope.go:117] "RemoveContainer" containerID="efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.923721 
4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.941924 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" podStartSLOduration=3.941909131 podStartE2EDuration="3.941909131s" podCreationTimestamp="2025-12-15 08:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:50:24.938307654 +0000 UTC m=+7150.509450565" watchObservedRunningTime="2025-12-15 08:50:24.941909131 +0000 UTC m=+7150.513052042" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.946554 4876 scope.go:117] "RemoveContainer" containerID="6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.961492 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.969004 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.976383 4876 scope.go:117] "RemoveContainer" containerID="efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" Dec 15 08:50:24 crc kubenswrapper[4876]: E1215 08:50:24.979922 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c\": container with ID starting with efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c not found: ID does not exist" containerID="efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.980015 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c"} err="failed to get container status \"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c\": rpc error: code = NotFound desc = could not find container \"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c\": container with ID starting with efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c not found: ID does not exist" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.980049 4876 scope.go:117] "RemoveContainer" containerID="6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" Dec 15 08:50:24 crc kubenswrapper[4876]: E1215 08:50:24.980799 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d\": container with ID starting with 6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d not found: ID does not exist" containerID="6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.980841 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d"} err="failed to get container status \"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d\": rpc error: code = NotFound desc = could not find container \"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d\": 
container with ID starting with 6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d not found: ID does not exist" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.980862 4876 scope.go:117] "RemoveContainer" containerID="efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.981275 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c"} err="failed to get container status \"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c\": rpc error: code = NotFound desc = could not find container \"efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c\": container with ID starting with efc89184616134930346710b7df142da1656dd3661b2851f7bd8de18f93f695c not found: ID does not exist" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.981303 4876 scope.go:117] "RemoveContainer" containerID="6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.981534 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d"} err="failed to get container status \"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d\": rpc error: code = NotFound desc = could not find container \"6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d\": container with ID starting with 6acd9ed4540e41461bfd05fcfe657a25190e9047e9d73870672a8384acd84c0d not found: ID does not exist" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.991721 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:24 crc kubenswrapper[4876]: E1215 08:50:24.992447 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-httpd" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.992472 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-httpd" Dec 15 08:50:24 crc kubenswrapper[4876]: E1215 08:50:24.992486 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-log" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.992493 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-log" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.992713 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-httpd" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.992746 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" containerName="glance-log" Dec 15 08:50:24 crc kubenswrapper[4876]: I1215 08:50:24.993956 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.000232 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.003750 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.024478 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.024738 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.024783 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.024862 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlrl5\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.024939 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.025007 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.025025 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.126986 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlrl5\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5\") pod \"glance-default-external-api-0\" (UID: 
\"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127087 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127173 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127233 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127314 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127339 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.127676 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.128541 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.132718 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.132879 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.133504 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.133941 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.145355 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlrl5\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5\") pod \"glance-default-external-api-0\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.320448 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.934605 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-log" containerID="cri-o://6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" gracePeriod=30 Dec 15 08:50:25 crc kubenswrapper[4876]: I1215 08:50:25.934742 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-httpd" containerID="cri-o://f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" gracePeriod=30 Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:25.998332 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:50:26 crc kubenswrapper[4876]: W1215 08:50:26.003195 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57c1baf1_a8c2_4d51_957f_506f65ae322d.slice/crio-c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9 WatchSource:0}: Error finding container c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9: Status 404 returned error can't find the container with id c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9 Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.664787 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.720293 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18692bd9-e543-46e3-95e3-88a0fae79370" path="/var/lib/kubelet/pods/18692bd9-e543-46e3-95e3-88a0fae79370/volumes" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.758891 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.758976 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jrg6\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.759037 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.759181 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.759211 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.759243 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.759336 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run\") pod \"dc12f433-00b5-4cbd-9961-d6a4659b2444\" (UID: \"dc12f433-00b5-4cbd-9961-d6a4659b2444\") " Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.760328 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs" (OuterVolumeSpecName: "logs") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.760526 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.763731 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph" (OuterVolumeSpecName: "ceph") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.763876 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6" (OuterVolumeSpecName: "kube-api-access-2jrg6") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "kube-api-access-2jrg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.763953 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts" (OuterVolumeSpecName: "scripts") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.786964 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.811067 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data" (OuterVolumeSpecName: "config-data") pod "dc12f433-00b5-4cbd-9961-d6a4659b2444" (UID: "dc12f433-00b5-4cbd-9961-d6a4659b2444"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.861915 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.861958 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc12f433-00b5-4cbd-9961-d6a4659b2444-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.861971 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jrg6\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-kube-api-access-2jrg6\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.861983 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/dc12f433-00b5-4cbd-9961-d6a4659b2444-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.861995 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.862006 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.862016 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc12f433-00b5-4cbd-9961-d6a4659b2444-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.968561 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerStarted","Data":"412d1fc7c07db9fe1b461923a490a486ad3773c9a21809ffbc371a1804994683"} Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.968625 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerStarted","Data":"c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9"} Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972272 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerID="f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" exitCode=0 Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972308 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerID="6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" exitCode=143 Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerDied","Data":"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12"} Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerDied","Data":"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba"} Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972375 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"dc12f433-00b5-4cbd-9961-d6a4659b2444","Type":"ContainerDied","Data":"12e23ae2a7ec9e8cc8311351fb699da5f9872fdbc9d1a0639491cf08838d79be"} Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972397 4876 scope.go:117] "RemoveContainer" containerID="f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" Dec 15 08:50:26 crc kubenswrapper[4876]: I1215 08:50:26.972455 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.010554 4876 scope.go:117] "RemoveContainer" containerID="6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.032658 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.045948 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.056359 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:27 crc kubenswrapper[4876]: E1215 08:50:27.056741 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-log" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.056754 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-log" Dec 15 08:50:27 crc kubenswrapper[4876]: E1215 08:50:27.056772 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-httpd" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.056777 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-httpd" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.056937 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-httpd" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.056955 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" containerName="glance-log" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.057799 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.059978 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.067026 4876 scope.go:117] "RemoveContainer" containerID="f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" Dec 15 08:50:27 crc kubenswrapper[4876]: E1215 08:50:27.067576 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12\": container with ID starting with f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12 not found: ID does not exist" containerID="f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.067611 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12"} err="failed to get container status \"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12\": rpc error: code = NotFound desc = could not find container \"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12\": container with ID starting with f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12 not found: ID does not exist" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.067636 4876 scope.go:117] "RemoveContainer" containerID="6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.080849 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:27 crc kubenswrapper[4876]: E1215 08:50:27.084624 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba\": container with ID starting with 6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba not found: ID does not exist" containerID="6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.084667 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba"} err="failed to get container status \"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba\": rpc error: code = NotFound desc = could not find container \"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba\": container with ID starting with 6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba not found: ID does not exist" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.084702 4876 scope.go:117] "RemoveContainer" containerID="f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.085162 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12"} err="failed to get container status \"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12\": rpc error: code = NotFound desc = could not find container \"f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12\": container with ID 
starting with f70ef65148574243c2e7d502e978e09579ccae47af1bb1a9e11cb9972a239d12 not found: ID does not exist" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.085213 4876 scope.go:117] "RemoveContainer" containerID="6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.085849 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba"} err="failed to get container status \"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba\": rpc error: code = NotFound desc = could not find container \"6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba\": container with ID starting with 6b7e20d179b9b84378645656740fab01f185187c8a8f115866e918cad427fdba not found: ID does not exist" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173389 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173489 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173539 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173569 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5llmt\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173660 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173683 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.173704 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275304 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275655 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275677 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275736 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275801 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275846 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.275874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5llmt\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.276263 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.276394 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.281401 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.281810 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.283670 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.285868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.297916 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5llmt\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt\") pod \"glance-default-internal-api-0\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.398627 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.933787 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:50:27 crc kubenswrapper[4876]: W1215 08:50:27.938123 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod588da42d_4a4a_4a12_80dc_79a76c714258.slice/crio-b8b202ab4db0c8cca3c4020d98fdfc3e72b2304a39b0238f123900af80d4b1db WatchSource:0}: Error finding container b8b202ab4db0c8cca3c4020d98fdfc3e72b2304a39b0238f123900af80d4b1db: Status 404 returned error can't find the container with id b8b202ab4db0c8cca3c4020d98fdfc3e72b2304a39b0238f123900af80d4b1db Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.985506 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerStarted","Data":"b8b202ab4db0c8cca3c4020d98fdfc3e72b2304a39b0238f123900af80d4b1db"} Dec 15 08:50:27 crc kubenswrapper[4876]: I1215 08:50:27.988887 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerStarted","Data":"01e43229f75b3fd7b4153fa581eb61f3e720307a2d0c54cccf8cf327d700d6ef"} Dec 15 08:50:28 crc kubenswrapper[4876]: I1215 08:50:28.020351 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.020329818 podStartE2EDuration="4.020329818s" podCreationTimestamp="2025-12-15 08:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:50:28.011919872 +0000 UTC m=+7153.583062803" watchObservedRunningTime="2025-12-15 08:50:28.020329818 +0000 UTC m=+7153.591472729" Dec 15 08:50:28 crc kubenswrapper[4876]: I1215 08:50:28.718867 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc12f433-00b5-4cbd-9961-d6a4659b2444" path="/var/lib/kubelet/pods/dc12f433-00b5-4cbd-9961-d6a4659b2444/volumes" Dec 15 08:50:28 crc kubenswrapper[4876]: I1215 08:50:28.999131 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerStarted","Data":"c6b200ccb13c1fb7fd76d1de54ae7a1f85fa283104d8a492c8871dbb7dceff04"} Dec 15 08:50:28 crc kubenswrapper[4876]: I1215 08:50:28.999201 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerStarted","Data":"e4c225a9aa22d556b60fb700bada998d4cd233542e748d0d8a6532d8bedaa65e"} Dec 15 08:50:29 crc kubenswrapper[4876]: I1215 08:50:29.031838 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.031819031 podStartE2EDuration="2.031819031s" podCreationTimestamp="2025-12-15 08:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:50:29.02655434 +0000 UTC m=+7154.597697261" watchObservedRunningTime="2025-12-15 08:50:29.031819031 +0000 UTC m=+7154.602961942" Dec 15 08:50:31 crc kubenswrapper[4876]: I1215 08:50:31.763506 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:50:31 crc kubenswrapper[4876]: I1215 08:50:31.828821 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:50:31 crc kubenswrapper[4876]: I1215 08:50:31.829824 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="dnsmasq-dns" containerID="cri-o://df1a0f71cc96e015f0804100e51ce582a52340415e6e9dd5e4c6bad83bda408a" gracePeriod=10 Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.039562 4876 generic.go:334] "Generic (PLEG): container finished" podID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerID="df1a0f71cc96e015f0804100e51ce582a52340415e6e9dd5e4c6bad83bda408a" exitCode=0 Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.039626 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" event={"ID":"8f5586db-a167-4c4a-b009-bbd8a0d3b13d","Type":"ContainerDied","Data":"df1a0f71cc96e015f0804100e51ce582a52340415e6e9dd5e4c6bad83bda408a"} Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.359362 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.465963 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb\") pod \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.466077 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb\") pod \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.466217 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxrcj\" (UniqueName: \"kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj\") pod \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.466247 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config\") pod \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.466287 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc\") pod \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\" (UID: \"8f5586db-a167-4c4a-b009-bbd8a0d3b13d\") " Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.471506 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj" (OuterVolumeSpecName: "kube-api-access-xxrcj") pod "8f5586db-a167-4c4a-b009-bbd8a0d3b13d" (UID: "8f5586db-a167-4c4a-b009-bbd8a0d3b13d"). InnerVolumeSpecName "kube-api-access-xxrcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.514721 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config" (OuterVolumeSpecName: "config") pod "8f5586db-a167-4c4a-b009-bbd8a0d3b13d" (UID: "8f5586db-a167-4c4a-b009-bbd8a0d3b13d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.516001 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8f5586db-a167-4c4a-b009-bbd8a0d3b13d" (UID: "8f5586db-a167-4c4a-b009-bbd8a0d3b13d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.522132 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8f5586db-a167-4c4a-b009-bbd8a0d3b13d" (UID: "8f5586db-a167-4c4a-b009-bbd8a0d3b13d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.527648 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8f5586db-a167-4c4a-b009-bbd8a0d3b13d" (UID: "8f5586db-a167-4c4a-b009-bbd8a0d3b13d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.569019 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.569070 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.569084 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxrcj\" (UniqueName: \"kubernetes.io/projected/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-kube-api-access-xxrcj\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.569114 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:32 crc kubenswrapper[4876]: I1215 08:50:32.569128 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f5586db-a167-4c4a-b009-bbd8a0d3b13d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.050449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" event={"ID":"8f5586db-a167-4c4a-b009-bbd8a0d3b13d","Type":"ContainerDied","Data":"183ff1f5b8c013f7a9d6308c619245da275d421de78f6e8889f9dd067d8b2924"} Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.050490 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-645f7b94c5-nn8px" Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.050519 4876 scope.go:117] "RemoveContainer" containerID="df1a0f71cc96e015f0804100e51ce582a52340415e6e9dd5e4c6bad83bda408a" Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.072984 4876 scope.go:117] "RemoveContainer" containerID="996e96fcefcb5e5deee4331b158e43d691875b65813ccff62caf654f2a22adf1" Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.076211 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:50:33 crc kubenswrapper[4876]: I1215 08:50:33.082628 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-645f7b94c5-nn8px"] Dec 15 08:50:34 crc kubenswrapper[4876]: I1215 08:50:34.714946 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:50:34 crc kubenswrapper[4876]: E1215 08:50:34.715565 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:50:34 crc kubenswrapper[4876]: I1215 08:50:34.717706 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" path="/var/lib/kubelet/pods/8f5586db-a167-4c4a-b009-bbd8a0d3b13d/volumes" Dec 15 08:50:35 crc kubenswrapper[4876]: I1215 08:50:35.320980 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 08:50:35 crc kubenswrapper[4876]: I1215 08:50:35.321053 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 08:50:35 crc kubenswrapper[4876]: I1215 08:50:35.348502 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 08:50:35 crc kubenswrapper[4876]: I1215 08:50:35.359054 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 08:50:36 crc kubenswrapper[4876]: I1215 08:50:36.074966 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 08:50:36 crc kubenswrapper[4876]: I1215 08:50:36.075012 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 08:50:37 crc kubenswrapper[4876]: I1215 08:50:37.399669 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:37 crc kubenswrapper[4876]: I1215 08:50:37.401174 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:37 crc kubenswrapper[4876]: I1215 08:50:37.428085 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:37 crc kubenswrapper[4876]: I1215 08:50:37.455471 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:38 crc kubenswrapper[4876]: I1215 
08:50:38.092599 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:38 crc kubenswrapper[4876]: I1215 08:50:38.092955 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:38 crc kubenswrapper[4876]: I1215 08:50:38.174812 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 08:50:38 crc kubenswrapper[4876]: I1215 08:50:38.174910 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 08:50:38 crc kubenswrapper[4876]: I1215 08:50:38.176961 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 08:50:40 crc kubenswrapper[4876]: I1215 08:50:40.103809 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 08:50:40 crc kubenswrapper[4876]: I1215 08:50:40.104917 4876 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 15 08:50:40 crc kubenswrapper[4876]: I1215 08:50:40.336025 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:40 crc kubenswrapper[4876]: I1215 08:50:40.495760 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.491997 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-k9fqd"] Dec 15 08:50:46 crc kubenswrapper[4876]: E1215 08:50:46.492963 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="dnsmasq-dns" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.492982 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="dnsmasq-dns" Dec 15 08:50:46 crc kubenswrapper[4876]: E1215 08:50:46.493023 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="init" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.493032 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="init" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.493267 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5586db-a167-4c4a-b009-bbd8a0d3b13d" containerName="dnsmasq-dns" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.494230 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.503005 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-k9fqd"] Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.605015 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-ef5a-account-create-update-rqdkt"] Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.606364 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.608493 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.613558 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ef5a-account-create-update-rqdkt"] Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.682739 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stnhm\" (UniqueName: \"kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.683060 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.785239 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.785420 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.785575 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkzcp\" (UniqueName: \"kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.785646 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stnhm\" (UniqueName: \"kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.786222 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.804446 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stnhm\" (UniqueName: 
\"kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm\") pod \"placement-db-create-k9fqd\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.815343 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.892130 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkzcp\" (UniqueName: \"kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.892211 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.892838 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.910618 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkzcp\" (UniqueName: \"kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp\") pod \"placement-ef5a-account-create-update-rqdkt\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:46 crc kubenswrapper[4876]: I1215 08:50:46.923095 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:47 crc kubenswrapper[4876]: I1215 08:50:47.289713 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-k9fqd"] Dec 15 08:50:47 crc kubenswrapper[4876]: W1215 08:50:47.297075 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0c31a53_ff2a_47f9_9ce2_5083836b9871.slice/crio-7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16 WatchSource:0}: Error finding container 7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16: Status 404 returned error can't find the container with id 7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16 Dec 15 08:50:47 crc kubenswrapper[4876]: I1215 08:50:47.705596 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:50:47 crc kubenswrapper[4876]: E1215 08:50:47.705794 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:50:48 crc kubenswrapper[4876]: I1215 08:50:48.077821 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ef5a-account-create-update-rqdkt"] Dec 15 08:50:48 crc kubenswrapper[4876]: W1215 08:50:48.082461 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d62244f_373f_4a19_a482_c0d99bf9970c.slice/crio-749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08 WatchSource:0}: Error finding container 749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08: Status 404 returned error can't find the container with id 749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08 Dec 15 08:50:48 crc kubenswrapper[4876]: I1215 08:50:48.168779 4876 generic.go:334] "Generic (PLEG): container finished" podID="a0c31a53-ff2a-47f9-9ce2-5083836b9871" containerID="d27634d6bf9b83053d6b04edc16da9afce20cc4ef46a924ce82cbd961f760ae2" exitCode=0 Dec 15 08:50:48 crc kubenswrapper[4876]: I1215 08:50:48.168878 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-k9fqd" event={"ID":"a0c31a53-ff2a-47f9-9ce2-5083836b9871","Type":"ContainerDied","Data":"d27634d6bf9b83053d6b04edc16da9afce20cc4ef46a924ce82cbd961f760ae2"} Dec 15 08:50:48 crc kubenswrapper[4876]: I1215 08:50:48.169175 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-k9fqd" event={"ID":"a0c31a53-ff2a-47f9-9ce2-5083836b9871","Type":"ContainerStarted","Data":"7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16"} Dec 15 08:50:48 crc kubenswrapper[4876]: I1215 08:50:48.170650 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ef5a-account-create-update-rqdkt" event={"ID":"8d62244f-373f-4a19-a482-c0d99bf9970c","Type":"ContainerStarted","Data":"749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08"} Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.178774 4876 generic.go:334] "Generic (PLEG): container finished" podID="8d62244f-373f-4a19-a482-c0d99bf9970c" 
containerID="dd4eefd3e6d82bb36b8f90fe3fd3b89f5f0885972cc2138896eef13a8becef11" exitCode=0 Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.178828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ef5a-account-create-update-rqdkt" event={"ID":"8d62244f-373f-4a19-a482-c0d99bf9970c","Type":"ContainerDied","Data":"dd4eefd3e6d82bb36b8f90fe3fd3b89f5f0885972cc2138896eef13a8becef11"} Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.556668 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.644361 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stnhm\" (UniqueName: \"kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm\") pod \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.644638 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts\") pod \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\" (UID: \"a0c31a53-ff2a-47f9-9ce2-5083836b9871\") " Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.645504 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a0c31a53-ff2a-47f9-9ce2-5083836b9871" (UID: "a0c31a53-ff2a-47f9-9ce2-5083836b9871"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.651270 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm" (OuterVolumeSpecName: "kube-api-access-stnhm") pod "a0c31a53-ff2a-47f9-9ce2-5083836b9871" (UID: "a0c31a53-ff2a-47f9-9ce2-5083836b9871"). InnerVolumeSpecName "kube-api-access-stnhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.746722 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0c31a53-ff2a-47f9-9ce2-5083836b9871-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:49 crc kubenswrapper[4876]: I1215 08:50:49.746752 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stnhm\" (UniqueName: \"kubernetes.io/projected/a0c31a53-ff2a-47f9-9ce2-5083836b9871-kube-api-access-stnhm\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.189419 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-k9fqd" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.189418 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-k9fqd" event={"ID":"a0c31a53-ff2a-47f9-9ce2-5083836b9871","Type":"ContainerDied","Data":"7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16"} Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.189848 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ca462f21f45b789b6ae31ef01185759752ae44459a01df8ded54ba98ac72c16" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.577296 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.597661 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkzcp\" (UniqueName: \"kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp\") pod \"8d62244f-373f-4a19-a482-c0d99bf9970c\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.602904 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp" (OuterVolumeSpecName: "kube-api-access-kkzcp") pod "8d62244f-373f-4a19-a482-c0d99bf9970c" (UID: "8d62244f-373f-4a19-a482-c0d99bf9970c"). InnerVolumeSpecName "kube-api-access-kkzcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.700210 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts\") pod \"8d62244f-373f-4a19-a482-c0d99bf9970c\" (UID: \"8d62244f-373f-4a19-a482-c0d99bf9970c\") " Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.700628 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkzcp\" (UniqueName: \"kubernetes.io/projected/8d62244f-373f-4a19-a482-c0d99bf9970c-kube-api-access-kkzcp\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.700893 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8d62244f-373f-4a19-a482-c0d99bf9970c" (UID: "8d62244f-373f-4a19-a482-c0d99bf9970c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:50:50 crc kubenswrapper[4876]: I1215 08:50:50.802207 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d62244f-373f-4a19-a482-c0d99bf9970c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:51 crc kubenswrapper[4876]: I1215 08:50:51.197815 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ef5a-account-create-update-rqdkt" event={"ID":"8d62244f-373f-4a19-a482-c0d99bf9970c","Type":"ContainerDied","Data":"749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08"} Dec 15 08:50:51 crc kubenswrapper[4876]: I1215 08:50:51.198664 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="749f851a21e26ec82340826110067600c8214570fb208fdeb298e45a59bbae08" Dec 15 08:50:51 crc kubenswrapper[4876]: I1215 08:50:51.197879 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ef5a-account-create-update-rqdkt" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.016928 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:50:52 crc kubenswrapper[4876]: E1215 08:50:52.017309 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d62244f-373f-4a19-a482-c0d99bf9970c" containerName="mariadb-account-create-update" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.017327 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d62244f-373f-4a19-a482-c0d99bf9970c" containerName="mariadb-account-create-update" Dec 15 08:50:52 crc kubenswrapper[4876]: E1215 08:50:52.017344 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c31a53-ff2a-47f9-9ce2-5083836b9871" containerName="mariadb-database-create" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.017351 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c31a53-ff2a-47f9-9ce2-5083836b9871" containerName="mariadb-database-create" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.017501 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c31a53-ff2a-47f9-9ce2-5083836b9871" containerName="mariadb-database-create" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.017518 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d62244f-373f-4a19-a482-c0d99bf9970c" containerName="mariadb-account-create-update" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.018387 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.089980 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.120164 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-w88rl"] Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.121449 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.126752 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9wvhh" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.127427 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.128059 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.133519 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.133586 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.133664 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.133701 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sshwr\" (UniqueName: \"kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.133727 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.184207 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-w88rl"] Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.236003 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.236860 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc 
kubenswrapper[4876]: I1215 08:50:52.236956 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237043 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237138 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvdt8\" (UniqueName: \"kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237246 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237321 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237381 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sshwr\" (UniqueName: \"kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237443 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.237530 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.238575 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.238654 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.239274 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.239738 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.298215 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sshwr\" (UniqueName: \"kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr\") pod \"dnsmasq-dns-5887476b87-vvqpk\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.339333 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.339406 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.339436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvdt8\" (UniqueName: \"kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.339478 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.339516 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.342306 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.342535 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.344990 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.346654 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.350815 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.369794 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvdt8\" (UniqueName: \"kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8\") pod \"placement-db-sync-w88rl\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.459452 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.944857 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-w88rl"] Dec 15 08:50:52 crc kubenswrapper[4876]: W1215 08:50:52.948463 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0be3c8f4_50fe_4da6_8afb_4fa22a938ea6.slice/crio-2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9 WatchSource:0}: Error finding container 2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9: Status 404 returned error can't find the container with id 2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9 Dec 15 08:50:52 crc kubenswrapper[4876]: W1215 08:50:52.950864 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b351b28_6a7f_4224_9dbc_6585daf22d31.slice/crio-6e5aaeb19696be814fd5ea24bb611412e4600b0dabb9a612bc6ee097a755e2b4 WatchSource:0}: Error finding container 6e5aaeb19696be814fd5ea24bb611412e4600b0dabb9a612bc6ee097a755e2b4: Status 404 returned error can't find the container with id 6e5aaeb19696be814fd5ea24bb611412e4600b0dabb9a612bc6ee097a755e2b4 Dec 15 08:50:52 crc kubenswrapper[4876]: I1215 08:50:52.952618 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:50:53 crc kubenswrapper[4876]: I1215 08:50:53.246736 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w88rl" event={"ID":"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6","Type":"ContainerStarted","Data":"2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9"} Dec 15 08:50:53 crc kubenswrapper[4876]: I1215 08:50:53.249575 4876 generic.go:334] "Generic (PLEG): container finished" podID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerID="1dabd4e530120f244dc086f9170e447bf92e7ab9a5891ed18dd0d34e234ef2ee" exitCode=0 Dec 15 08:50:53 crc kubenswrapper[4876]: I1215 08:50:53.249625 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" event={"ID":"7b351b28-6a7f-4224-9dbc-6585daf22d31","Type":"ContainerDied","Data":"1dabd4e530120f244dc086f9170e447bf92e7ab9a5891ed18dd0d34e234ef2ee"} Dec 15 08:50:53 crc kubenswrapper[4876]: I1215 08:50:53.249657 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" event={"ID":"7b351b28-6a7f-4224-9dbc-6585daf22d31","Type":"ContainerStarted","Data":"6e5aaeb19696be814fd5ea24bb611412e4600b0dabb9a612bc6ee097a755e2b4"} Dec 15 08:50:54 crc kubenswrapper[4876]: I1215 08:50:54.260056 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" event={"ID":"7b351b28-6a7f-4224-9dbc-6585daf22d31","Type":"ContainerStarted","Data":"301c95455c770ee1cfa8b72687e27dd45a185fce8720955281cfac2dccb27071"} Dec 15 08:50:54 crc kubenswrapper[4876]: I1215 08:50:54.260338 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:50:54 crc kubenswrapper[4876]: I1215 08:50:54.283279 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" podStartSLOduration=3.283258737 podStartE2EDuration="3.283258737s" podCreationTimestamp="2025-12-15 08:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2025-12-15 08:50:54.2800458 +0000 UTC m=+7179.851188711" watchObservedRunningTime="2025-12-15 08:50:54.283258737 +0000 UTC m=+7179.854401658" Dec 15 08:50:57 crc kubenswrapper[4876]: I1215 08:50:57.315402 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w88rl" event={"ID":"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6","Type":"ContainerStarted","Data":"c021ab5eb1c92eecc2b6bd97ce31e03455de6057f5dba9063d5b35ee0dbf4136"} Dec 15 08:50:57 crc kubenswrapper[4876]: I1215 08:50:57.335304 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-w88rl" podStartSLOduration=2.0374349 podStartE2EDuration="5.335280885s" podCreationTimestamp="2025-12-15 08:50:52 +0000 UTC" firstStartedPulling="2025-12-15 08:50:52.951027235 +0000 UTC m=+7178.522170146" lastFinishedPulling="2025-12-15 08:50:56.24887322 +0000 UTC m=+7181.820016131" observedRunningTime="2025-12-15 08:50:57.329752876 +0000 UTC m=+7182.900895797" watchObservedRunningTime="2025-12-15 08:50:57.335280885 +0000 UTC m=+7182.906423796" Dec 15 08:50:58 crc kubenswrapper[4876]: I1215 08:50:58.326565 4876 generic.go:334] "Generic (PLEG): container finished" podID="0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" containerID="c021ab5eb1c92eecc2b6bd97ce31e03455de6057f5dba9063d5b35ee0dbf4136" exitCode=0 Dec 15 08:50:58 crc kubenswrapper[4876]: I1215 08:50:58.326696 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w88rl" event={"ID":"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6","Type":"ContainerDied","Data":"c021ab5eb1c92eecc2b6bd97ce31e03455de6057f5dba9063d5b35ee0dbf4136"} Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.673412 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-w88rl" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.705392 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:50:59 crc kubenswrapper[4876]: E1215 08:50:59.705669 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.784924 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data\") pod \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785021 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvdt8\" (UniqueName: \"kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8\") pod \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785128 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs\") pod \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785364 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts\") pod \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785396 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle\") pod \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\" (UID: \"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6\") " Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785665 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs" (OuterVolumeSpecName: "logs") pod "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" (UID: "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.785844 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.790314 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts" (OuterVolumeSpecName: "scripts") pod "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" (UID: "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.790549 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8" (OuterVolumeSpecName: "kube-api-access-mvdt8") pod "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" (UID: "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6"). InnerVolumeSpecName "kube-api-access-mvdt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.808539 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" (UID: "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.810380 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data" (OuterVolumeSpecName: "config-data") pod "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" (UID: "0be3c8f4-50fe-4da6-8afb-4fa22a938ea6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.887683 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.887723 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.887734 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:50:59 crc kubenswrapper[4876]: I1215 08:50:59.887746 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvdt8\" (UniqueName: \"kubernetes.io/projected/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6-kube-api-access-mvdt8\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.343332 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-w88rl" event={"ID":"0be3c8f4-50fe-4da6-8afb-4fa22a938ea6","Type":"ContainerDied","Data":"2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9"} Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.343759 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a0b2e086cc288dc465495ee63f531bdca7acfdcf2be70536f2b6fd1d1c222e9" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.343888 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-w88rl" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.498660 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-84f5ccbfbd-njqxx"] Dec 15 08:51:00 crc kubenswrapper[4876]: E1215 08:51:00.499278 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" containerName="placement-db-sync" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.499352 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" containerName="placement-db-sync" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.499575 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" containerName="placement-db-sync" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.500677 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.502673 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.503288 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-9wvhh" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.504788 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.510053 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84f5ccbfbd-njqxx"] Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.603409 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5463000-4fbc-4eb1-9c34-50270510d74e-logs\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.603647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-scripts\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.603724 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzm5m\" (UniqueName: \"kubernetes.io/projected/f5463000-4fbc-4eb1-9c34-50270510d74e-kube-api-access-tzm5m\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.603924 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-combined-ca-bundle\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.604022 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-config-data\") pod 
\"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.705853 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-scripts\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.705903 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzm5m\" (UniqueName: \"kubernetes.io/projected/f5463000-4fbc-4eb1-9c34-50270510d74e-kube-api-access-tzm5m\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.705974 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-combined-ca-bundle\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.706014 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-config-data\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.706046 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5463000-4fbc-4eb1-9c34-50270510d74e-logs\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.706515 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5463000-4fbc-4eb1-9c34-50270510d74e-logs\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.710518 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-combined-ca-bundle\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.711133 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-config-data\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.711504 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5463000-4fbc-4eb1-9c34-50270510d74e-scripts\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.731922 
4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzm5m\" (UniqueName: \"kubernetes.io/projected/f5463000-4fbc-4eb1-9c34-50270510d74e-kube-api-access-tzm5m\") pod \"placement-84f5ccbfbd-njqxx\" (UID: \"f5463000-4fbc-4eb1-9c34-50270510d74e\") " pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:00 crc kubenswrapper[4876]: I1215 08:51:00.833028 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:01 crc kubenswrapper[4876]: I1215 08:51:01.299647 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84f5ccbfbd-njqxx"] Dec 15 08:51:01 crc kubenswrapper[4876]: W1215 08:51:01.305265 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5463000_4fbc_4eb1_9c34_50270510d74e.slice/crio-7c12026cfd3c8b56668a57d8885b9252b11b43100f4424980df9ea332cd62cf3 WatchSource:0}: Error finding container 7c12026cfd3c8b56668a57d8885b9252b11b43100f4424980df9ea332cd62cf3: Status 404 returned error can't find the container with id 7c12026cfd3c8b56668a57d8885b9252b11b43100f4424980df9ea332cd62cf3 Dec 15 08:51:01 crc kubenswrapper[4876]: I1215 08:51:01.351134 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84f5ccbfbd-njqxx" event={"ID":"f5463000-4fbc-4eb1-9c34-50270510d74e","Type":"ContainerStarted","Data":"7c12026cfd3c8b56668a57d8885b9252b11b43100f4424980df9ea332cd62cf3"} Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.353362 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.360102 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84f5ccbfbd-njqxx" event={"ID":"f5463000-4fbc-4eb1-9c34-50270510d74e","Type":"ContainerStarted","Data":"31de760df9a9edd551e0082a60c5f5c8d96bcaad76091c73a9d9fa4ec5a7159d"} Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.360153 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84f5ccbfbd-njqxx" event={"ID":"f5463000-4fbc-4eb1-9c34-50270510d74e","Type":"ContainerStarted","Data":"68b17c34e5a1b711fefab5e036b0ff8f103974203d18f65d97bb82a141b5ec16"} Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.360280 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.451579 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-84f5ccbfbd-njqxx" podStartSLOduration=2.445760721 podStartE2EDuration="2.445760721s" podCreationTimestamp="2025-12-15 08:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:51:02.399843768 +0000 UTC m=+7187.970986729" watchObservedRunningTime="2025-12-15 08:51:02.445760721 +0000 UTC m=+7188.016903642" Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.455985 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.456297 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="dnsmasq-dns" 
containerID="cri-o://e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331" gracePeriod=10 Dec 15 08:51:02 crc kubenswrapper[4876]: I1215 08:51:02.947127 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.044240 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config\") pod \"401c87b1-11fe-4f5f-906c-19ca84ae348f\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.044365 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb\") pod \"401c87b1-11fe-4f5f-906c-19ca84ae348f\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.044477 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nptqw\" (UniqueName: \"kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw\") pod \"401c87b1-11fe-4f5f-906c-19ca84ae348f\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.044519 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb\") pod \"401c87b1-11fe-4f5f-906c-19ca84ae348f\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.044538 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc\") pod \"401c87b1-11fe-4f5f-906c-19ca84ae348f\" (UID: \"401c87b1-11fe-4f5f-906c-19ca84ae348f\") " Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.051307 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw" (OuterVolumeSpecName: "kube-api-access-nptqw") pod "401c87b1-11fe-4f5f-906c-19ca84ae348f" (UID: "401c87b1-11fe-4f5f-906c-19ca84ae348f"). InnerVolumeSpecName "kube-api-access-nptqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.085626 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "401c87b1-11fe-4f5f-906c-19ca84ae348f" (UID: "401c87b1-11fe-4f5f-906c-19ca84ae348f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.087734 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config" (OuterVolumeSpecName: "config") pod "401c87b1-11fe-4f5f-906c-19ca84ae348f" (UID: "401c87b1-11fe-4f5f-906c-19ca84ae348f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.090773 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "401c87b1-11fe-4f5f-906c-19ca84ae348f" (UID: "401c87b1-11fe-4f5f-906c-19ca84ae348f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.093481 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "401c87b1-11fe-4f5f-906c-19ca84ae348f" (UID: "401c87b1-11fe-4f5f-906c-19ca84ae348f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.146880 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.146923 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nptqw\" (UniqueName: \"kubernetes.io/projected/401c87b1-11fe-4f5f-906c-19ca84ae348f-kube-api-access-nptqw\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.146933 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.146943 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.146954 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/401c87b1-11fe-4f5f-906c-19ca84ae348f-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.369853 4876 generic.go:334] "Generic (PLEG): container finished" podID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerID="e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331" exitCode=0 Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.369940 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.369942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" event={"ID":"401c87b1-11fe-4f5f-906c-19ca84ae348f","Type":"ContainerDied","Data":"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331"} Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.370030 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84d576d867-gs2tx" event={"ID":"401c87b1-11fe-4f5f-906c-19ca84ae348f","Type":"ContainerDied","Data":"3341584bad5ec0cda9db2a4e69989ff97f0c3766aa2938aef4fae3bebd07bd96"} Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.370064 4876 scope.go:117] "RemoveContainer" containerID="e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.370134 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.407214 4876 scope.go:117] "RemoveContainer" containerID="78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.427231 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.436170 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84d576d867-gs2tx"] Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.444027 4876 scope.go:117] "RemoveContainer" containerID="e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331" Dec 15 08:51:03 crc kubenswrapper[4876]: E1215 08:51:03.444695 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331\": container with ID starting with e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331 not found: ID does not exist" containerID="e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.444725 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331"} err="failed to get container status \"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331\": rpc error: code = NotFound desc = could not find container \"e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331\": container with ID starting with e9098aed4db66cec6ac5ce8718d3237c542cf0c70234639edace818156cef331 not found: ID does not exist" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.444747 4876 scope.go:117] "RemoveContainer" containerID="78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9" Dec 15 08:51:03 crc kubenswrapper[4876]: E1215 08:51:03.444989 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9\": container with ID starting with 78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9 not found: ID does not exist" containerID="78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9" Dec 15 08:51:03 crc kubenswrapper[4876]: I1215 08:51:03.445012 4876 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9"} err="failed to get container status \"78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9\": rpc error: code = NotFound desc = could not find container \"78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9\": container with ID starting with 78295d4266cc52eb3b394645ff4d65064a0b477abc00a15e5b899002029edcc9 not found: ID does not exist" Dec 15 08:51:04 crc kubenswrapper[4876]: I1215 08:51:04.720709 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" path="/var/lib/kubelet/pods/401c87b1-11fe-4f5f-906c-19ca84ae348f/volumes" Dec 15 08:51:13 crc kubenswrapper[4876]: I1215 08:51:13.705417 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:51:13 crc kubenswrapper[4876]: E1215 08:51:13.706306 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:51:24 crc kubenswrapper[4876]: I1215 08:51:24.283588 4876 scope.go:117] "RemoveContainer" containerID="02e42033b0273d1d2c2f971460ccebdfefa517669fc38412e4ba355eaa1b9326" Dec 15 08:51:24 crc kubenswrapper[4876]: I1215 08:51:24.313594 4876 scope.go:117] "RemoveContainer" containerID="90439e07806b6c66d6c500f9a910142d2f236a9f91b0197d1ea346d0a0357e6b" Dec 15 08:51:28 crc kubenswrapper[4876]: I1215 08:51:28.705553 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:51:28 crc kubenswrapper[4876]: E1215 08:51:28.706480 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:51:31 crc kubenswrapper[4876]: I1215 08:51:31.910990 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:31 crc kubenswrapper[4876]: I1215 08:51:31.913271 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84f5ccbfbd-njqxx" Dec 15 08:51:42 crc kubenswrapper[4876]: I1215 08:51:42.705781 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:51:42 crc kubenswrapper[4876]: E1215 08:51:42.706600 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.049064 4876 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:43 crc kubenswrapper[4876]: E1215 08:51:43.049700 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="init" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.049725 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="init" Dec 15 08:51:43 crc kubenswrapper[4876]: E1215 08:51:43.049758 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="dnsmasq-dns" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.049767 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="dnsmasq-dns" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.054598 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="401c87b1-11fe-4f5f-906c-19ca84ae348f" containerName="dnsmasq-dns" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.056141 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.067859 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.176029 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.176066 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phcfq\" (UniqueName: \"kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.176252 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.277841 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.278082 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phcfq\" (UniqueName: \"kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.278311 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.278386 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.278749 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.299965 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phcfq\" (UniqueName: \"kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq\") pod \"redhat-marketplace-q2rnb\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.379003 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:43 crc kubenswrapper[4876]: I1215 08:51:43.858799 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:44 crc kubenswrapper[4876]: I1215 08:51:44.685898 4876 generic.go:334] "Generic (PLEG): container finished" podID="b1c64020-1894-4004-9a0e-b7e87292534c" containerID="6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3" exitCode=0 Dec 15 08:51:44 crc kubenswrapper[4876]: I1215 08:51:44.685948 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerDied","Data":"6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3"} Dec 15 08:51:44 crc kubenswrapper[4876]: I1215 08:51:44.685977 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerStarted","Data":"1f525e27ef71a10c91dd57a34b6368e7ee20bdcb33ada2201d0c5a617a5208a6"} Dec 15 08:51:46 crc kubenswrapper[4876]: I1215 08:51:46.705828 4876 generic.go:334] "Generic (PLEG): container finished" podID="b1c64020-1894-4004-9a0e-b7e87292534c" containerID="5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de" exitCode=0 Dec 15 08:51:46 crc kubenswrapper[4876]: I1215 08:51:46.717011 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerDied","Data":"5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de"} Dec 15 08:51:47 crc kubenswrapper[4876]: I1215 08:51:47.738380 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" 
event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerStarted","Data":"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d"} Dec 15 08:51:47 crc kubenswrapper[4876]: I1215 08:51:47.762303 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q2rnb" podStartSLOduration=3.284245451 podStartE2EDuration="5.762282015s" podCreationTimestamp="2025-12-15 08:51:42 +0000 UTC" firstStartedPulling="2025-12-15 08:51:44.687923496 +0000 UTC m=+7230.259066407" lastFinishedPulling="2025-12-15 08:51:47.16596006 +0000 UTC m=+7232.737102971" observedRunningTime="2025-12-15 08:51:47.758014071 +0000 UTC m=+7233.329156982" watchObservedRunningTime="2025-12-15 08:51:47.762282015 +0000 UTC m=+7233.333424936" Dec 15 08:51:53 crc kubenswrapper[4876]: I1215 08:51:53.379592 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:53 crc kubenswrapper[4876]: I1215 08:51:53.380432 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:53 crc kubenswrapper[4876]: I1215 08:51:53.433606 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:53 crc kubenswrapper[4876]: I1215 08:51:53.838465 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:53 crc kubenswrapper[4876]: I1215 08:51:53.888536 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:54 crc kubenswrapper[4876]: I1215 08:51:54.705920 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:51:54 crc kubenswrapper[4876]: E1215 08:51:54.706158 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.749513 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-47w78"] Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.751267 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.761937 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-47w78"] Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.809191 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q2rnb" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="registry-server" containerID="cri-o://1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d" gracePeriod=2 Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.872089 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-d2d9x"] Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.878358 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.880815 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-d2d9x"] Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.903941 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpkgw\" (UniqueName: \"kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.904095 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:55 crc kubenswrapper[4876]: I1215 08:51:55.997799 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-5r9dq"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.011270 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.011399 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.011496 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9b87-account-create-update-2kv7w"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.011560 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.012576 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz7tt\" (UniqueName: \"kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.012722 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpkgw\" (UniqueName: \"kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.014082 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 
08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.021379 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.027536 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9b87-account-create-update-2kv7w"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.028639 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.041591 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5r9dq"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.051419 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpkgw\" (UniqueName: \"kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw\") pod \"nova-api-db-create-47w78\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121090 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121550 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121602 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntsv6\" (UniqueName: \"kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121750 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz7tt\" (UniqueName: \"kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.121795 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgrqp\" (UniqueName: \"kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " 
pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.124777 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.127611 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.154832 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0ae8-account-create-update-m4mh9"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.158011 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.161025 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.171963 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ae8-account-create-update-m4mh9"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.179216 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz7tt\" (UniqueName: \"kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt\") pod \"nova-cell0-db-create-d2d9x\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.224836 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.224933 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.225265 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2zvr\" (UniqueName: \"kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.225378 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.225426 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ntsv6\" (UniqueName: \"kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.225585 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgrqp\" (UniqueName: \"kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.226368 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.226431 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.253718 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntsv6\" (UniqueName: \"kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6\") pod \"nova-cell1-db-create-5r9dq\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.253905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgrqp\" (UniqueName: \"kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp\") pod \"nova-api-9b87-account-create-update-2kv7w\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.279288 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.327547 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.327635 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2zvr\" (UniqueName: \"kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.328680 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.343194 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.355459 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.356060 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-d9ca-account-create-update-48tvt"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.356173 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2zvr\" (UniqueName: \"kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr\") pod \"nova-cell0-0ae8-account-create-update-m4mh9\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: E1215 08:51:56.356408 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="registry-server" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.356422 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="registry-server" Dec 15 08:51:56 crc kubenswrapper[4876]: E1215 08:51:56.356444 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="extract-content" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.356450 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="extract-content" Dec 15 08:51:56 crc kubenswrapper[4876]: E1215 08:51:56.356463 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="extract-utilities" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.356469 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="extract-utilities" Dec 15 08:51:56 crc 
kubenswrapper[4876]: I1215 08:51:56.356657 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" containerName="registry-server" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.357269 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.359513 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.367767 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d9ca-account-create-update-48tvt"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.387535 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.428700 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content\") pod \"b1c64020-1894-4004-9a0e-b7e87292534c\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.428964 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities\") pod \"b1c64020-1894-4004-9a0e-b7e87292534c\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.429045 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phcfq\" (UniqueName: \"kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq\") pod \"b1c64020-1894-4004-9a0e-b7e87292534c\" (UID: \"b1c64020-1894-4004-9a0e-b7e87292534c\") " Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.429376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzswq\" (UniqueName: \"kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.429453 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.430690 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities" (OuterVolumeSpecName: "utilities") pod "b1c64020-1894-4004-9a0e-b7e87292534c" (UID: "b1c64020-1894-4004-9a0e-b7e87292534c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.434352 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq" (OuterVolumeSpecName: "kube-api-access-phcfq") pod "b1c64020-1894-4004-9a0e-b7e87292534c" (UID: "b1c64020-1894-4004-9a0e-b7e87292534c"). InnerVolumeSpecName "kube-api-access-phcfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.465462 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b1c64020-1894-4004-9a0e-b7e87292534c" (UID: "b1c64020-1894-4004-9a0e-b7e87292534c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.532359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.532561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzswq\" (UniqueName: \"kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.532645 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.532661 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1c64020-1894-4004-9a0e-b7e87292534c-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.532673 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phcfq\" (UniqueName: \"kubernetes.io/projected/b1c64020-1894-4004-9a0e-b7e87292534c-kube-api-access-phcfq\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.533749 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.554435 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzswq\" (UniqueName: \"kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq\") pod \"nova-cell1-d9ca-account-create-update-48tvt\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.640028 4876 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.671414 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-47w78"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.681401 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.845747 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-d2d9x"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.855239 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-47w78" event={"ID":"4eaef55c-be30-42da-8330-d032fb44126a","Type":"ContainerStarted","Data":"0f40f778703f411c064f89718e97161fc5e64c3d91b98a3691bda0bb753880e3"} Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.857449 4876 generic.go:334] "Generic (PLEG): container finished" podID="b1c64020-1894-4004-9a0e-b7e87292534c" containerID="1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d" exitCode=0 Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.857523 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerDied","Data":"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d"} Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.857558 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2rnb" event={"ID":"b1c64020-1894-4004-9a0e-b7e87292534c","Type":"ContainerDied","Data":"1f525e27ef71a10c91dd57a34b6368e7ee20bdcb33ada2201d0c5a617a5208a6"} Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.857578 4876 scope.go:117] "RemoveContainer" containerID="1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.857753 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2rnb" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.888258 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.896409 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2rnb"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.898204 4876 scope.go:117] "RemoveContainer" containerID="5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de" Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.937644 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-5r9dq"] Dec 15 08:51:56 crc kubenswrapper[4876]: I1215 08:51:56.950730 4876 scope.go:117] "RemoveContainer" containerID="6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3" Dec 15 08:51:56 crc kubenswrapper[4876]: W1215 08:51:56.953509 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0e67fab_6009_4e35_9c27_4796b2edad06.slice/crio-c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4 WatchSource:0}: Error finding container c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4: Status 404 returned error can't find the container with id c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.027651 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0ae8-account-create-update-m4mh9"] Dec 15 08:51:57 crc kubenswrapper[4876]: W1215 08:51:57.027839 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fcd344c_f201_4c81_8186_3e81f8302a36.slice/crio-1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5 WatchSource:0}: Error finding container 1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5: Status 404 returned error can't find the container with id 1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5 Dec 15 08:51:57 crc kubenswrapper[4876]: W1215 08:51:57.035640 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod953df704_40c6_4eca_8f13_b093568d432b.slice/crio-80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5 WatchSource:0}: Error finding container 80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5: Status 404 returned error can't find the container with id 80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.041544 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9b87-account-create-update-2kv7w"] Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.101363 4876 scope.go:117] "RemoveContainer" containerID="1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d" Dec 15 08:51:57 crc kubenswrapper[4876]: E1215 08:51:57.102456 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d\": container with ID starting with 1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d not found: ID does not exist" 
containerID="1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.102504 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d"} err="failed to get container status \"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d\": rpc error: code = NotFound desc = could not find container \"1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d\": container with ID starting with 1c2eda576e720a3f6898136de511e6c440b2f7c3928b58e80a536e57371e598d not found: ID does not exist" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.102530 4876 scope.go:117] "RemoveContainer" containerID="5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de" Dec 15 08:51:57 crc kubenswrapper[4876]: E1215 08:51:57.102924 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de\": container with ID starting with 5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de not found: ID does not exist" containerID="5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.102970 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de"} err="failed to get container status \"5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de\": rpc error: code = NotFound desc = could not find container \"5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de\": container with ID starting with 5de3967b3897081f66200165b72412e9b4f8a2de70e5af59382608ec333b58de not found: ID does not exist" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.102993 4876 scope.go:117] "RemoveContainer" containerID="6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3" Dec 15 08:51:57 crc kubenswrapper[4876]: E1215 08:51:57.103334 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3\": container with ID starting with 6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3 not found: ID does not exist" containerID="6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.103361 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3"} err="failed to get container status \"6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3\": rpc error: code = NotFound desc = could not find container \"6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3\": container with ID starting with 6f0a14fcc4080ddc8d0de3d163b024389dc1bd8d43bc5a9d90fd2474334ce9c3 not found: ID does not exist" Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.230125 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-d9ca-account-create-update-48tvt"] Dec 15 08:51:57 crc kubenswrapper[4876]: W1215 08:51:57.242358 4876 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod223cf08e_b426_4b16_a6ba_a5f0c40e8b84.slice/crio-4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b WatchSource:0}: Error finding container 4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b: Status 404 returned error can't find the container with id 4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.866379 4876 generic.go:334] "Generic (PLEG): container finished" podID="953df704-40c6-4eca-8f13-b093568d432b" containerID="9df3929b52d68ab57b3705e184a5bb23f4f5f0872a16abcfc91c5ff84756fae8" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.866445 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" event={"ID":"953df704-40c6-4eca-8f13-b093568d432b","Type":"ContainerDied","Data":"9df3929b52d68ab57b3705e184a5bb23f4f5f0872a16abcfc91c5ff84756fae8"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.866471 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" event={"ID":"953df704-40c6-4eca-8f13-b093568d432b","Type":"ContainerStarted","Data":"80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.868206 4876 generic.go:334] "Generic (PLEG): container finished" podID="4eaef55c-be30-42da-8330-d032fb44126a" containerID="8ca56a8617bdd7a83636537f726a90544d4e9d6d4243444649cafc83ea495083" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.868279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-47w78" event={"ID":"4eaef55c-be30-42da-8330-d032fb44126a","Type":"ContainerDied","Data":"8ca56a8617bdd7a83636537f726a90544d4e9d6d4243444649cafc83ea495083"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.869916 4876 generic.go:334] "Generic (PLEG): container finished" podID="5fcd344c-f201-4c81-8186-3e81f8302a36" containerID="c2aec999d98831d9eff88709e2d78ab08404585bea81efb1b2be22bc6c1add6e" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.869971 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9b87-account-create-update-2kv7w" event={"ID":"5fcd344c-f201-4c81-8186-3e81f8302a36","Type":"ContainerDied","Data":"c2aec999d98831d9eff88709e2d78ab08404585bea81efb1b2be22bc6c1add6e"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.869991 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9b87-account-create-update-2kv7w" event={"ID":"5fcd344c-f201-4c81-8186-3e81f8302a36","Type":"ContainerStarted","Data":"1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.871294 4876 generic.go:334] "Generic (PLEG): container finished" podID="f0e67fab-6009-4e35-9c27-4796b2edad06" containerID="78947f7dd5b1daef1a8470d69630a6955a70ba18f58ff758ed6f7044b49f8f29" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.871361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5r9dq" event={"ID":"f0e67fab-6009-4e35-9c27-4796b2edad06","Type":"ContainerDied","Data":"78947f7dd5b1daef1a8470d69630a6955a70ba18f58ff758ed6f7044b49f8f29"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.871389 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5r9dq" 
event={"ID":"f0e67fab-6009-4e35-9c27-4796b2edad06","Type":"ContainerStarted","Data":"c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.873530 4876 generic.go:334] "Generic (PLEG): container finished" podID="223cf08e-b426-4b16-a6ba-a5f0c40e8b84" containerID="16ae18cde3b96f71b8208d640df0ea38786910a78569dda822df8e43d7e27205" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.873570 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" event={"ID":"223cf08e-b426-4b16-a6ba-a5f0c40e8b84","Type":"ContainerDied","Data":"16ae18cde3b96f71b8208d640df0ea38786910a78569dda822df8e43d7e27205"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.873596 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" event={"ID":"223cf08e-b426-4b16-a6ba-a5f0c40e8b84","Type":"ContainerStarted","Data":"4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.874888 4876 generic.go:334] "Generic (PLEG): container finished" podID="c5b5542b-4fb6-4997-9a47-b867bec18c51" containerID="040522678f9118e4de447e03daa1ea3039e2a5a62e3182154a1129c034633d32" exitCode=0 Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.874936 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d2d9x" event={"ID":"c5b5542b-4fb6-4997-9a47-b867bec18c51","Type":"ContainerDied","Data":"040522678f9118e4de447e03daa1ea3039e2a5a62e3182154a1129c034633d32"} Dec 15 08:51:57 crc kubenswrapper[4876]: I1215 08:51:57.874950 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d2d9x" event={"ID":"c5b5542b-4fb6-4997-9a47-b867bec18c51","Type":"ContainerStarted","Data":"8ef9d347f710aff3501c956cf4850695234d6e396f2e98713ad175f46be05f44"} Dec 15 08:51:58 crc kubenswrapper[4876]: I1215 08:51:58.716182 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1c64020-1894-4004-9a0e-b7e87292534c" path="/var/lib/kubelet/pods/b1c64020-1894-4004-9a0e-b7e87292534c/volumes" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.247014 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.384783 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts\") pod \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.384990 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzswq\" (UniqueName: \"kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq\") pod \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\" (UID: \"223cf08e-b426-4b16-a6ba-a5f0c40e8b84\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.385687 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "223cf08e-b426-4b16-a6ba-a5f0c40e8b84" (UID: "223cf08e-b426-4b16-a6ba-a5f0c40e8b84"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.391574 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq" (OuterVolumeSpecName: "kube-api-access-qzswq") pod "223cf08e-b426-4b16-a6ba-a5f0c40e8b84" (UID: "223cf08e-b426-4b16-a6ba-a5f0c40e8b84"). InnerVolumeSpecName "kube-api-access-qzswq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.474277 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.480222 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.487020 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzswq\" (UniqueName: \"kubernetes.io/projected/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-kube-api-access-qzswq\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.487158 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/223cf08e-b426-4b16-a6ba-a5f0c40e8b84-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.490484 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.498667 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.513411 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589056 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz7tt\" (UniqueName: \"kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt\") pod \"c5b5542b-4fb6-4997-9a47-b867bec18c51\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589333 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts\") pod \"5fcd344c-f201-4c81-8186-3e81f8302a36\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589409 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts\") pod \"f0e67fab-6009-4e35-9c27-4796b2edad06\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589488 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts\") pod \"953df704-40c6-4eca-8f13-b093568d432b\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589685 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts\") pod \"c5b5542b-4fb6-4997-9a47-b867bec18c51\" (UID: \"c5b5542b-4fb6-4997-9a47-b867bec18c51\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589805 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpkgw\" (UniqueName: \"kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw\") pod \"4eaef55c-be30-42da-8330-d032fb44126a\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589932 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntsv6\" (UniqueName: \"kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6\") pod \"f0e67fab-6009-4e35-9c27-4796b2edad06\" (UID: \"f0e67fab-6009-4e35-9c27-4796b2edad06\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.589717 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5fcd344c-f201-4c81-8186-3e81f8302a36" (UID: "5fcd344c-f201-4c81-8186-3e81f8302a36"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590057 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts\") pod \"4eaef55c-be30-42da-8330-d032fb44126a\" (UID: \"4eaef55c-be30-42da-8330-d032fb44126a\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590184 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2zvr\" (UniqueName: \"kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr\") pod \"953df704-40c6-4eca-8f13-b093568d432b\" (UID: \"953df704-40c6-4eca-8f13-b093568d432b\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590188 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "953df704-40c6-4eca-8f13-b093568d432b" (UID: "953df704-40c6-4eca-8f13-b093568d432b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590239 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c5b5542b-4fb6-4997-9a47-b867bec18c51" (UID: "c5b5542b-4fb6-4997-9a47-b867bec18c51"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590243 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgrqp\" (UniqueName: \"kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp\") pod \"5fcd344c-f201-4c81-8186-3e81f8302a36\" (UID: \"5fcd344c-f201-4c81-8186-3e81f8302a36\") " Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590333 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f0e67fab-6009-4e35-9c27-4796b2edad06" (UID: "f0e67fab-6009-4e35-9c27-4796b2edad06"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.590800 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4eaef55c-be30-42da-8330-d032fb44126a" (UID: "4eaef55c-be30-42da-8330-d032fb44126a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.591063 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5b5542b-4fb6-4997-9a47-b867bec18c51-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.591176 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eaef55c-be30-42da-8330-d032fb44126a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.591255 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fcd344c-f201-4c81-8186-3e81f8302a36-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.591342 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0e67fab-6009-4e35-9c27-4796b2edad06-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.591419 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/953df704-40c6-4eca-8f13-b093568d432b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.593566 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp" (OuterVolumeSpecName: "kube-api-access-pgrqp") pod "5fcd344c-f201-4c81-8186-3e81f8302a36" (UID: "5fcd344c-f201-4c81-8186-3e81f8302a36"). InnerVolumeSpecName "kube-api-access-pgrqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.593622 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw" (OuterVolumeSpecName: "kube-api-access-mpkgw") pod "4eaef55c-be30-42da-8330-d032fb44126a" (UID: "4eaef55c-be30-42da-8330-d032fb44126a"). InnerVolumeSpecName "kube-api-access-mpkgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.593739 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr" (OuterVolumeSpecName: "kube-api-access-k2zvr") pod "953df704-40c6-4eca-8f13-b093568d432b" (UID: "953df704-40c6-4eca-8f13-b093568d432b"). InnerVolumeSpecName "kube-api-access-k2zvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.593897 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt" (OuterVolumeSpecName: "kube-api-access-cz7tt") pod "c5b5542b-4fb6-4997-9a47-b867bec18c51" (UID: "c5b5542b-4fb6-4997-9a47-b867bec18c51"). InnerVolumeSpecName "kube-api-access-cz7tt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.594023 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6" (OuterVolumeSpecName: "kube-api-access-ntsv6") pod "f0e67fab-6009-4e35-9c27-4796b2edad06" (UID: "f0e67fab-6009-4e35-9c27-4796b2edad06"). InnerVolumeSpecName "kube-api-access-ntsv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.693068 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpkgw\" (UniqueName: \"kubernetes.io/projected/4eaef55c-be30-42da-8330-d032fb44126a-kube-api-access-mpkgw\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.693122 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntsv6\" (UniqueName: \"kubernetes.io/projected/f0e67fab-6009-4e35-9c27-4796b2edad06-kube-api-access-ntsv6\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.693164 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2zvr\" (UniqueName: \"kubernetes.io/projected/953df704-40c6-4eca-8f13-b093568d432b-kube-api-access-k2zvr\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.693174 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgrqp\" (UniqueName: \"kubernetes.io/projected/5fcd344c-f201-4c81-8186-3e81f8302a36-kube-api-access-pgrqp\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.693185 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz7tt\" (UniqueName: \"kubernetes.io/projected/c5b5542b-4fb6-4997-9a47-b867bec18c51-kube-api-access-cz7tt\") on node \"crc\" DevicePath \"\"" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.895383 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-5r9dq" event={"ID":"f0e67fab-6009-4e35-9c27-4796b2edad06","Type":"ContainerDied","Data":"c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.895428 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c68bd7ef9182e0ef973873e1efc99b87fc9401a0e63b2c8330a7db58ee9e66c4" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.895389 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-5r9dq" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.896981 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-d2d9x" event={"ID":"c5b5542b-4fb6-4997-9a47-b867bec18c51","Type":"ContainerDied","Data":"8ef9d347f710aff3501c956cf4850695234d6e396f2e98713ad175f46be05f44"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.897010 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-d2d9x" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.897014 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ef9d347f710aff3501c956cf4850695234d6e396f2e98713ad175f46be05f44" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.898784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" event={"ID":"953df704-40c6-4eca-8f13-b093568d432b","Type":"ContainerDied","Data":"80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.898811 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80136ac6937090ed26b6ae6d12622081df36f7bf6a59a055cb2887b1c87e21e5" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.898867 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0ae8-account-create-update-m4mh9" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.900664 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-47w78" event={"ID":"4eaef55c-be30-42da-8330-d032fb44126a","Type":"ContainerDied","Data":"0f40f778703f411c064f89718e97161fc5e64c3d91b98a3691bda0bb753880e3"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.900714 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f40f778703f411c064f89718e97161fc5e64c3d91b98a3691bda0bb753880e3" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.900892 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-47w78" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.901927 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9b87-account-create-update-2kv7w" event={"ID":"5fcd344c-f201-4c81-8186-3e81f8302a36","Type":"ContainerDied","Data":"1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.901947 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9b87-account-create-update-2kv7w" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.901953 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cc669dd2f14cadd010580b19e8b35ee4dae18cda328284b072dfeb6c36202c5" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.903639 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" event={"ID":"223cf08e-b426-4b16-a6ba-a5f0c40e8b84","Type":"ContainerDied","Data":"4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b"} Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.903854 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cec973c95ab9b32572ab92211c824e034fec0b991bfa3ea7cd382a2cf30131b" Dec 15 08:51:59 crc kubenswrapper[4876]: I1215 08:51:59.903826 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-d9ca-account-create-update-48tvt" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348000 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6zsmd"] Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348782 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0e67fab-6009-4e35-9c27-4796b2edad06" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348799 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0e67fab-6009-4e35-9c27-4796b2edad06" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348819 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eaef55c-be30-42da-8330-d032fb44126a" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348828 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eaef55c-be30-42da-8330-d032fb44126a" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348846 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fcd344c-f201-4c81-8186-3e81f8302a36" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348853 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fcd344c-f201-4c81-8186-3e81f8302a36" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348875 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b5542b-4fb6-4997-9a47-b867bec18c51" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348883 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b5542b-4fb6-4997-9a47-b867bec18c51" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348893 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223cf08e-b426-4b16-a6ba-a5f0c40e8b84" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348901 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="223cf08e-b426-4b16-a6ba-a5f0c40e8b84" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: E1215 08:52:01.348913 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953df704-40c6-4eca-8f13-b093568d432b" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.348921 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="953df704-40c6-4eca-8f13-b093568d432b" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349156 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5b5542b-4fb6-4997-9a47-b867bec18c51" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349169 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0e67fab-6009-4e35-9c27-4796b2edad06" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349181 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fcd344c-f201-4c81-8186-3e81f8302a36" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349201 4876 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="223cf08e-b426-4b16-a6ba-a5f0c40e8b84" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349212 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="953df704-40c6-4eca-8f13-b093568d432b" containerName="mariadb-account-create-update" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349223 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eaef55c-be30-42da-8330-d032fb44126a" containerName="mariadb-database-create" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.349909 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.353358 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.353765 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-stqmk" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.354915 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.374940 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6zsmd"] Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.418201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.418246 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.418293 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m758x\" (UniqueName: \"kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.418397 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.520201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.520250 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.520300 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m758x\" (UniqueName: \"kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.520359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.527274 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.527334 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.529710 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.539321 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m758x\" (UniqueName: \"kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x\") pod \"nova-cell0-conductor-db-sync-6zsmd\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:01 crc kubenswrapper[4876]: I1215 08:52:01.672793 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:02 crc kubenswrapper[4876]: I1215 08:52:02.108404 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6zsmd"] Dec 15 08:52:02 crc kubenswrapper[4876]: W1215 08:52:02.119438 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11b68586_fe23_4bf7_9880_17f32a5fc121.slice/crio-29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9 WatchSource:0}: Error finding container 29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9: Status 404 returned error can't find the container with id 29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9 Dec 15 08:52:02 crc kubenswrapper[4876]: I1215 08:52:02.931307 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" event={"ID":"11b68586-fe23-4bf7-9880-17f32a5fc121","Type":"ContainerStarted","Data":"29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9"} Dec 15 08:52:05 crc kubenswrapper[4876]: I1215 08:52:05.709234 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:52:05 crc kubenswrapper[4876]: E1215 08:52:05.709765 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:52:12 crc kubenswrapper[4876]: I1215 08:52:12.012243 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" event={"ID":"11b68586-fe23-4bf7-9880-17f32a5fc121","Type":"ContainerStarted","Data":"361fc22060ff41baf5b749911caa16711f74ea6f5d15c69ee892bc5abee4a52f"} Dec 15 08:52:12 crc kubenswrapper[4876]: I1215 08:52:12.035788 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" podStartSLOduration=2.361144532 podStartE2EDuration="11.035760039s" podCreationTimestamp="2025-12-15 08:52:01 +0000 UTC" firstStartedPulling="2025-12-15 08:52:02.121378015 +0000 UTC m=+7247.692520916" lastFinishedPulling="2025-12-15 08:52:10.795993512 +0000 UTC m=+7256.367136423" observedRunningTime="2025-12-15 08:52:12.025691918 +0000 UTC m=+7257.596834829" watchObservedRunningTime="2025-12-15 08:52:12.035760039 +0000 UTC m=+7257.606902950" Dec 15 08:52:17 crc kubenswrapper[4876]: I1215 08:52:17.056516 4876 generic.go:334] "Generic (PLEG): container finished" podID="11b68586-fe23-4bf7-9880-17f32a5fc121" containerID="361fc22060ff41baf5b749911caa16711f74ea6f5d15c69ee892bc5abee4a52f" exitCode=0 Dec 15 08:52:17 crc kubenswrapper[4876]: I1215 08:52:17.056599 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" event={"ID":"11b68586-fe23-4bf7-9880-17f32a5fc121","Type":"ContainerDied","Data":"361fc22060ff41baf5b749911caa16711f74ea6f5d15c69ee892bc5abee4a52f"} Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.364061 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.550251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts\") pod \"11b68586-fe23-4bf7-9880-17f32a5fc121\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.550339 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle\") pod \"11b68586-fe23-4bf7-9880-17f32a5fc121\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.550362 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data\") pod \"11b68586-fe23-4bf7-9880-17f32a5fc121\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.550389 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m758x\" (UniqueName: \"kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x\") pod \"11b68586-fe23-4bf7-9880-17f32a5fc121\" (UID: \"11b68586-fe23-4bf7-9880-17f32a5fc121\") " Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.556260 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts" (OuterVolumeSpecName: "scripts") pod "11b68586-fe23-4bf7-9880-17f32a5fc121" (UID: "11b68586-fe23-4bf7-9880-17f32a5fc121"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.557494 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x" (OuterVolumeSpecName: "kube-api-access-m758x") pod "11b68586-fe23-4bf7-9880-17f32a5fc121" (UID: "11b68586-fe23-4bf7-9880-17f32a5fc121"). InnerVolumeSpecName "kube-api-access-m758x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.575151 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data" (OuterVolumeSpecName: "config-data") pod "11b68586-fe23-4bf7-9880-17f32a5fc121" (UID: "11b68586-fe23-4bf7-9880-17f32a5fc121"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.575958 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11b68586-fe23-4bf7-9880-17f32a5fc121" (UID: "11b68586-fe23-4bf7-9880-17f32a5fc121"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.651939 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.651978 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.651990 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m758x\" (UniqueName: \"kubernetes.io/projected/11b68586-fe23-4bf7-9880-17f32a5fc121-kube-api-access-m758x\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:18 crc kubenswrapper[4876]: I1215 08:52:18.652000 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11b68586-fe23-4bf7-9880-17f32a5fc121-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:18 crc kubenswrapper[4876]: E1215 08:52:18.802180 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11b68586_fe23_4bf7_9880_17f32a5fc121.slice\": RecentStats: unable to find data in memory cache]" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.082085 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" event={"ID":"11b68586-fe23-4bf7-9880-17f32a5fc121","Type":"ContainerDied","Data":"29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9"} Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.082469 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29b390fa90f06fade39f163ea673bcb269a14bbad6d7ab968718d7fd088715f9" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.082256 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6zsmd" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.147606 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:52:19 crc kubenswrapper[4876]: E1215 08:52:19.156289 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11b68586-fe23-4bf7-9880-17f32a5fc121" containerName="nova-cell0-conductor-db-sync" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.156335 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="11b68586-fe23-4bf7-9880-17f32a5fc121" containerName="nova-cell0-conductor-db-sync" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.156643 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="11b68586-fe23-4bf7-9880-17f32a5fc121" containerName="nova-cell0-conductor-db-sync" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.157553 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.160353 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-stqmk" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.163053 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.164547 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.262051 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m42b6\" (UniqueName: \"kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.262259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.262286 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.365542 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.365595 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.365623 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m42b6\" (UniqueName: \"kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.370035 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.370178 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.394915 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m42b6\" (UniqueName: \"kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6\") pod \"nova-cell0-conductor-0\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.471896 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:19 crc kubenswrapper[4876]: I1215 08:52:19.932679 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:52:20 crc kubenswrapper[4876]: I1215 08:52:20.093775 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a17c448b-7ca1-4969-8809-686f22a724eb","Type":"ContainerStarted","Data":"4bfe7a7aa38a426ba5dd6bbcaaab964062d100e2f0262b683c0173a82b9fde1a"} Dec 15 08:52:20 crc kubenswrapper[4876]: I1215 08:52:20.709449 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:52:20 crc kubenswrapper[4876]: E1215 08:52:20.709899 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:52:21 crc kubenswrapper[4876]: I1215 08:52:21.105793 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a17c448b-7ca1-4969-8809-686f22a724eb","Type":"ContainerStarted","Data":"512e54014c71727ad9498e29ec20dfd11e88c241c89b15abfb0299ec03d618be"} Dec 15 08:52:21 crc kubenswrapper[4876]: I1215 08:52:21.106005 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:21 crc kubenswrapper[4876]: I1215 08:52:21.136276 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.136246551 podStartE2EDuration="2.136246551s" podCreationTimestamp="2025-12-15 08:52:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:21.126890359 +0000 UTC m=+7266.698033290" watchObservedRunningTime="2025-12-15 08:52:21.136246551 +0000 UTC m=+7266.707389462" Dec 15 08:52:24 crc kubenswrapper[4876]: I1215 08:52:24.434287 4876 scope.go:117] "RemoveContainer" containerID="64c497f594d2a2da96bb3548a931ad72fbe29917889e0da7ef54103d18d34e3a" Dec 15 08:52:29 crc kubenswrapper[4876]: I1215 08:52:29.501306 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 15 08:52:29 crc kubenswrapper[4876]: I1215 08:52:29.989068 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-dtrjw"] Dec 15 08:52:29 crc kubenswrapper[4876]: I1215 08:52:29.990286 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:29 crc kubenswrapper[4876]: I1215 08:52:29.992613 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 15 08:52:29 crc kubenswrapper[4876]: I1215 08:52:29.992814 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.025588 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-dtrjw"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.108032 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6bss\" (UniqueName: \"kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.108074 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.108133 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.108362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.199861 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.201571 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.209673 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6bss\" (UniqueName: \"kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.209710 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.209755 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.209827 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.217381 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.218285 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.223736 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.223944 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.254136 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.278792 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6bss\" (UniqueName: \"kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss\") pod \"nova-cell0-cell-mapping-dtrjw\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.314590 4876 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.315386 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.315532 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.315677 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.315773 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzsj4\" (UniqueName: \"kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.372294 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.373766 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.389120 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.411716 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.420028 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.420073 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.420134 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.420154 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzsj4\" (UniqueName: \"kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.423872 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.446056 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.448049 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.501316 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzsj4\" (UniqueName: \"kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4\") pod \"nova-api-0\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.523054 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 
crc kubenswrapper[4876]: I1215 08:52:30.523174 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.523212 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56796\" (UniqueName: \"kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.523248 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.535225 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.536462 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.540603 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.556275 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.557877 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.563373 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.578077 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.602335 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.616777 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637333 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637492 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6hrj\" (UniqueName: \"kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637571 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.637882 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56796\" (UniqueName: \"kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.638090 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.646824 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.647817 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.657769 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.674575 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.678896 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.681832 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.698864 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56796\" (UniqueName: \"kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796\") pod \"nova-metadata-0\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.748880 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.748952 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.748978 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749013 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749054 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749136 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8vkd\" (UniqueName: \"kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749161 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749191 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749219 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnxlj\" (UniqueName: \"kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749237 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6hrj\" (UniqueName: \"kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.749272 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.763898 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.763930 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.774806 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6hrj\" (UniqueName: 
\"kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj\") pod \"nova-scheduler-0\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.799219 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.851822 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8vkd\" (UniqueName: \"kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.851874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.851903 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.851930 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnxlj\" (UniqueName: \"kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.851960 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.852067 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.852093 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.852509 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.855649 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.856252 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.856494 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.856878 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.856985 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.858174 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.870197 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8vkd\" (UniqueName: \"kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd\") pod \"dnsmasq-dns-7c57649dd5-pb6qb\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.886714 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnxlj\" (UniqueName: \"kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj\") pod \"nova-cell1-novncproxy-0\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.896568 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.958057 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:30 crc kubenswrapper[4876]: I1215 08:52:30.986735 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.091736 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-dtrjw"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.185250 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-dtrjw" event={"ID":"f173f1f0-710b-49df-b7d4-41407cc721ee","Type":"ContainerStarted","Data":"d79cb733ae2357c6808b2983efc00e75d172d9bc1565e0b9b86de1068a258bf4"} Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.223921 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z5c25"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.232421 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.242078 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.260136 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.260383 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.286126 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z5c25"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.364261 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.364311 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq9qj\" (UniqueName: \"kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.364380 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.364471 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.465829 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z5c25\" 
(UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.466279 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.466351 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.466384 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq9qj\" (UniqueName: \"kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.471560 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.477774 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.494721 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.496865 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq9qj\" (UniqueName: \"kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj\") pod \"nova-cell1-conductor-db-sync-z5c25\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.685971 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.696409 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.804424 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.867971 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:52:31 crc kubenswrapper[4876]: I1215 08:52:31.968132 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:52:31 crc kubenswrapper[4876]: W1215 08:52:31.980458 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbaae51f3_1e71_48bb_a9c3_efbc282f8132.slice/crio-21ded807cc38d561af80f61cb48b394b8aaec814c1312b2627d28b3d85f1c4c7 WatchSource:0}: Error finding container 21ded807cc38d561af80f61cb48b394b8aaec814c1312b2627d28b3d85f1c4c7: Status 404 returned error can't find the container with id 21ded807cc38d561af80f61cb48b394b8aaec814c1312b2627d28b3d85f1c4c7 Dec 15 08:52:32 crc kubenswrapper[4876]: W1215 08:52:32.180345 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22f2e283_c325_496d_ac96_3624106f8233.slice/crio-79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa WatchSource:0}: Error finding container 79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa: Status 404 returned error can't find the container with id 79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.188689 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z5c25"] Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.209219 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-dtrjw" event={"ID":"f173f1f0-710b-49df-b7d4-41407cc721ee","Type":"ContainerStarted","Data":"1b1bcca7e20d911935e8f6ceed87a9152a4c4a00d644bb4fb06a6845089693ae"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.228883 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerStarted","Data":"a1ee7d4f9dfab2f5d1f1becb07d7c6ee8d7e587b8193b9ca2c13b01a2fd3c241"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.228926 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerStarted","Data":"21ded807cc38d561af80f61cb48b394b8aaec814c1312b2627d28b3d85f1c4c7"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.231072 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerStarted","Data":"2429862062c0382ab660cadf8e338a62d4960e058e626a8c5d4add807217e6aa"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.232094 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-dtrjw" podStartSLOduration=3.232083486 podStartE2EDuration="3.232083486s" podCreationTimestamp="2025-12-15 08:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:32.226513456 +0000 UTC m=+7277.797656377" watchObservedRunningTime="2025-12-15 08:52:32.232083486 +0000 UTC m=+7277.803226397" Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.233389 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z5c25" event={"ID":"22f2e283-c325-496d-ac96-3624106f8233","Type":"ContainerStarted","Data":"79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.237330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb55517c-d53c-40c9-9109-a5afc12665ba","Type":"ContainerStarted","Data":"44e7128ebe79321e1b209afed6fda02a9ab300e9c67f96df4bfeaf79f3d0ce3d"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.238654 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerStarted","Data":"ea82ab4510d54ed774c6dbd50f25c729dca3ad58eb34127090b37067b68b2ca3"} Dec 15 08:52:32 crc kubenswrapper[4876]: I1215 08:52:32.240956 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70","Type":"ContainerStarted","Data":"0633ad0cf7208bc6d54c0af3ace81224055e0ddd4b0056fddbe38f1c6ecffc80"} Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.253730 4876 generic.go:334] "Generic (PLEG): container finished" podID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerID="a1ee7d4f9dfab2f5d1f1becb07d7c6ee8d7e587b8193b9ca2c13b01a2fd3c241" exitCode=0 Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.253826 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerDied","Data":"a1ee7d4f9dfab2f5d1f1becb07d7c6ee8d7e587b8193b9ca2c13b01a2fd3c241"} Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.254041 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerStarted","Data":"b97565fbc81a3654ac9590ce2325492d6e25822361092acfb1c6b81ffa47d936"} Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.254068 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.258465 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z5c25" event={"ID":"22f2e283-c325-496d-ac96-3624106f8233","Type":"ContainerStarted","Data":"b1dfea02d2f9687f45c446126f30bc122aa5f490f59e9981b9129ad30eb8cfeb"} Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.282806 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" podStartSLOduration=3.282780241 podStartE2EDuration="3.282780241s" podCreationTimestamp="2025-12-15 08:52:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:33.274495119 +0000 UTC m=+7278.845638030" watchObservedRunningTime="2025-12-15 08:52:33.282780241 +0000 UTC m=+7278.853923152" Dec 15 08:52:33 crc kubenswrapper[4876]: I1215 08:52:33.300544 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-conductor-db-sync-z5c25" podStartSLOduration=2.300517398 podStartE2EDuration="2.300517398s" podCreationTimestamp="2025-12-15 08:52:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:33.292456291 +0000 UTC m=+7278.863599212" watchObservedRunningTime="2025-12-15 08:52:33.300517398 +0000 UTC m=+7278.871660309" Dec 15 08:52:34 crc kubenswrapper[4876]: I1215 08:52:34.714688 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:52:34 crc kubenswrapper[4876]: E1215 08:52:34.716382 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.277708 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerStarted","Data":"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.278085 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerStarted","Data":"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.279533 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70","Type":"ContainerStarted","Data":"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.281831 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerStarted","Data":"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.281888 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerStarted","Data":"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.293466 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb55517c-d53c-40c9-9109-a5afc12665ba","Type":"ContainerStarted","Data":"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0"} Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.307170 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.048390961 podStartE2EDuration="5.307143203s" podCreationTimestamp="2025-12-15 08:52:30 +0000 UTC" firstStartedPulling="2025-12-15 08:52:31.823875805 +0000 UTC m=+7277.395018716" lastFinishedPulling="2025-12-15 08:52:34.082628047 +0000 UTC m=+7279.653770958" observedRunningTime="2025-12-15 08:52:35.296730504 +0000 UTC m=+7280.867873415" watchObservedRunningTime="2025-12-15 08:52:35.307143203 +0000 UTC m=+7280.878286124" Dec 15 08:52:35 
crc kubenswrapper[4876]: I1215 08:52:35.326214 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.965764022 podStartE2EDuration="5.326194765s" podCreationTimestamp="2025-12-15 08:52:30 +0000 UTC" firstStartedPulling="2025-12-15 08:52:31.728493882 +0000 UTC m=+7277.299636793" lastFinishedPulling="2025-12-15 08:52:34.088924625 +0000 UTC m=+7279.660067536" observedRunningTime="2025-12-15 08:52:35.319816484 +0000 UTC m=+7280.890959425" watchObservedRunningTime="2025-12-15 08:52:35.326194765 +0000 UTC m=+7280.897337676" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.350436 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.549011752 podStartE2EDuration="5.350415116s" podCreationTimestamp="2025-12-15 08:52:30 +0000 UTC" firstStartedPulling="2025-12-15 08:52:31.281270694 +0000 UTC m=+7276.852413645" lastFinishedPulling="2025-12-15 08:52:34.082674098 +0000 UTC m=+7279.653817009" observedRunningTime="2025-12-15 08:52:35.343581492 +0000 UTC m=+7280.914724423" watchObservedRunningTime="2025-12-15 08:52:35.350415116 +0000 UTC m=+7280.921558027" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.360952 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.150542058 podStartE2EDuration="5.360930369s" podCreationTimestamp="2025-12-15 08:52:30 +0000 UTC" firstStartedPulling="2025-12-15 08:52:31.877899658 +0000 UTC m=+7277.449042569" lastFinishedPulling="2025-12-15 08:52:34.088287969 +0000 UTC m=+7279.659430880" observedRunningTime="2025-12-15 08:52:35.360189839 +0000 UTC m=+7280.931332750" watchObservedRunningTime="2025-12-15 08:52:35.360930369 +0000 UTC m=+7280.932073280" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.799877 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.799938 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.896981 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 08:52:35 crc kubenswrapper[4876]: I1215 08:52:35.959026 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:36 crc kubenswrapper[4876]: I1215 08:52:36.302160 4876 generic.go:334] "Generic (PLEG): container finished" podID="22f2e283-c325-496d-ac96-3624106f8233" containerID="b1dfea02d2f9687f45c446126f30bc122aa5f490f59e9981b9129ad30eb8cfeb" exitCode=0 Dec 15 08:52:36 crc kubenswrapper[4876]: I1215 08:52:36.302244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z5c25" event={"ID":"22f2e283-c325-496d-ac96-3624106f8233","Type":"ContainerDied","Data":"b1dfea02d2f9687f45c446126f30bc122aa5f490f59e9981b9129ad30eb8cfeb"} Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.314894 4876 generic.go:334] "Generic (PLEG): container finished" podID="f173f1f0-710b-49df-b7d4-41407cc721ee" containerID="1b1bcca7e20d911935e8f6ceed87a9152a4c4a00d644bb4fb06a6845089693ae" exitCode=0 Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.314972 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-dtrjw" 
event={"ID":"f173f1f0-710b-49df-b7d4-41407cc721ee","Type":"ContainerDied","Data":"1b1bcca7e20d911935e8f6ceed87a9152a4c4a00d644bb4fb06a6845089693ae"} Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.650348 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.809177 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq9qj\" (UniqueName: \"kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj\") pod \"22f2e283-c325-496d-ac96-3624106f8233\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.809313 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle\") pod \"22f2e283-c325-496d-ac96-3624106f8233\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.809381 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts\") pod \"22f2e283-c325-496d-ac96-3624106f8233\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.809405 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data\") pod \"22f2e283-c325-496d-ac96-3624106f8233\" (UID: \"22f2e283-c325-496d-ac96-3624106f8233\") " Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.819493 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj" (OuterVolumeSpecName: "kube-api-access-rq9qj") pod "22f2e283-c325-496d-ac96-3624106f8233" (UID: "22f2e283-c325-496d-ac96-3624106f8233"). InnerVolumeSpecName "kube-api-access-rq9qj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.820234 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts" (OuterVolumeSpecName: "scripts") pod "22f2e283-c325-496d-ac96-3624106f8233" (UID: "22f2e283-c325-496d-ac96-3624106f8233"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.841923 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data" (OuterVolumeSpecName: "config-data") pod "22f2e283-c325-496d-ac96-3624106f8233" (UID: "22f2e283-c325-496d-ac96-3624106f8233"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.842253 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22f2e283-c325-496d-ac96-3624106f8233" (UID: "22f2e283-c325-496d-ac96-3624106f8233"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.911456 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.911493 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.911506 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq9qj\" (UniqueName: \"kubernetes.io/projected/22f2e283-c325-496d-ac96-3624106f8233-kube-api-access-rq9qj\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:37 crc kubenswrapper[4876]: I1215 08:52:37.911519 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22f2e283-c325-496d-ac96-3624106f8233-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.327019 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z5c25" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.327038 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z5c25" event={"ID":"22f2e283-c325-496d-ac96-3624106f8233","Type":"ContainerDied","Data":"79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa"} Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.329075 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79732cf61db85f336f836c1138892a8f3ebec1a7a31190a9c13f33fb91ad76aa" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.407011 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:52:38 crc kubenswrapper[4876]: E1215 08:52:38.407539 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22f2e283-c325-496d-ac96-3624106f8233" containerName="nova-cell1-conductor-db-sync" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.407564 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="22f2e283-c325-496d-ac96-3624106f8233" containerName="nova-cell1-conductor-db-sync" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.407731 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="22f2e283-c325-496d-ac96-3624106f8233" containerName="nova-cell1-conductor-db-sync" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.408480 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.410418 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.417804 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.521634 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.521823 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr58t\" (UniqueName: \"kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.522239 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.631576 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.631671 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.631720 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr58t\" (UniqueName: \"kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.639402 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.658064 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.661344 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr58t\" (UniqueName: \"kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t\") pod \"nova-cell1-conductor-0\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.723499 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.814167 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.935296 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle\") pod \"f173f1f0-710b-49df-b7d4-41407cc721ee\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.935468 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data\") pod \"f173f1f0-710b-49df-b7d4-41407cc721ee\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.935517 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts\") pod \"f173f1f0-710b-49df-b7d4-41407cc721ee\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.935591 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6bss\" (UniqueName: \"kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss\") pod \"f173f1f0-710b-49df-b7d4-41407cc721ee\" (UID: \"f173f1f0-710b-49df-b7d4-41407cc721ee\") " Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.941877 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts" (OuterVolumeSpecName: "scripts") pod "f173f1f0-710b-49df-b7d4-41407cc721ee" (UID: "f173f1f0-710b-49df-b7d4-41407cc721ee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.959719 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss" (OuterVolumeSpecName: "kube-api-access-v6bss") pod "f173f1f0-710b-49df-b7d4-41407cc721ee" (UID: "f173f1f0-710b-49df-b7d4-41407cc721ee"). InnerVolumeSpecName "kube-api-access-v6bss". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.969350 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f173f1f0-710b-49df-b7d4-41407cc721ee" (UID: "f173f1f0-710b-49df-b7d4-41407cc721ee"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:38 crc kubenswrapper[4876]: I1215 08:52:38.972719 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data" (OuterVolumeSpecName: "config-data") pod "f173f1f0-710b-49df-b7d4-41407cc721ee" (UID: "f173f1f0-710b-49df-b7d4-41407cc721ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.039307 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.039336 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.039345 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6bss\" (UniqueName: \"kubernetes.io/projected/f173f1f0-710b-49df-b7d4-41407cc721ee-kube-api-access-v6bss\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.039354 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f173f1f0-710b-49df-b7d4-41407cc721ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.174295 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.340172 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ebceed8f-b110-451e-a2b3-b228414b2b5c","Type":"ContainerStarted","Data":"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122"} Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.340250 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ebceed8f-b110-451e-a2b3-b228414b2b5c","Type":"ContainerStarted","Data":"334491fa2ae0e50454a68d2530365597b64ee6c82cd9b872d90fc3fd70332c4a"} Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.340271 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.342482 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-dtrjw" event={"ID":"f173f1f0-710b-49df-b7d4-41407cc721ee","Type":"ContainerDied","Data":"d79cb733ae2357c6808b2983efc00e75d172d9bc1565e0b9b86de1068a258bf4"} Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.342547 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d79cb733ae2357c6808b2983efc00e75d172d9bc1565e0b9b86de1068a258bf4" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.342608 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-dtrjw" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.372776 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.372742681 podStartE2EDuration="1.372742681s" podCreationTimestamp="2025-12-15 08:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:39.36825959 +0000 UTC m=+7284.939402501" watchObservedRunningTime="2025-12-15 08:52:39.372742681 +0000 UTC m=+7284.943885592" Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.517530 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.517829 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-log" containerID="cri-o://acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" gracePeriod=30 Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.518127 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-api" containerID="cri-o://086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" gracePeriod=30 Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.536043 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.536633 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" containerName="nova-scheduler-scheduler" containerID="cri-o://f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424" gracePeriod=30 Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.607284 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.607520 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-log" containerID="cri-o://1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" gracePeriod=30 Dec 15 08:52:39 crc kubenswrapper[4876]: I1215 08:52:39.607566 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-metadata" containerID="cri-o://5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" gracePeriod=30 Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.103082 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.172916 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.180412 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data\") pod \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.180512 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs\") pod \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.180566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzsj4\" (UniqueName: \"kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4\") pod \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.180678 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle\") pod \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\" (UID: \"ad4818eb-206d-4afe-90a0-ea69e74fd1fb\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.181740 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs" (OuterVolumeSpecName: "logs") pod "ad4818eb-206d-4afe-90a0-ea69e74fd1fb" (UID: "ad4818eb-206d-4afe-90a0-ea69e74fd1fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.207287 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4" (OuterVolumeSpecName: "kube-api-access-jzsj4") pod "ad4818eb-206d-4afe-90a0-ea69e74fd1fb" (UID: "ad4818eb-206d-4afe-90a0-ea69e74fd1fb"). InnerVolumeSpecName "kube-api-access-jzsj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.242262 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad4818eb-206d-4afe-90a0-ea69e74fd1fb" (UID: "ad4818eb-206d-4afe-90a0-ea69e74fd1fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.256760 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data" (OuterVolumeSpecName: "config-data") pod "ad4818eb-206d-4afe-90a0-ea69e74fd1fb" (UID: "ad4818eb-206d-4afe-90a0-ea69e74fd1fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.283559 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle\") pod \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.283646 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs\") pod \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.283754 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data\") pod \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.283805 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56796\" (UniqueName: \"kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796\") pod \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\" (UID: \"f3e07f1f-638c-44ad-90cb-4dead1896d6b\") " Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.284098 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.284130 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.284141 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzsj4\" (UniqueName: \"kubernetes.io/projected/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-kube-api-access-jzsj4\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.284151 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad4818eb-206d-4afe-90a0-ea69e74fd1fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.284451 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs" (OuterVolumeSpecName: "logs") pod "f3e07f1f-638c-44ad-90cb-4dead1896d6b" (UID: "f3e07f1f-638c-44ad-90cb-4dead1896d6b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.304395 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796" (OuterVolumeSpecName: "kube-api-access-56796") pod "f3e07f1f-638c-44ad-90cb-4dead1896d6b" (UID: "f3e07f1f-638c-44ad-90cb-4dead1896d6b"). InnerVolumeSpecName "kube-api-access-56796". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.326920 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data" (OuterVolumeSpecName: "config-data") pod "f3e07f1f-638c-44ad-90cb-4dead1896d6b" (UID: "f3e07f1f-638c-44ad-90cb-4dead1896d6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.387145 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.387995 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56796\" (UniqueName: \"kubernetes.io/projected/f3e07f1f-638c-44ad-90cb-4dead1896d6b-kube-api-access-56796\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.388082 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3e07f1f-638c-44ad-90cb-4dead1896d6b-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.391043 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3e07f1f-638c-44ad-90cb-4dead1896d6b" (UID: "f3e07f1f-638c-44ad-90cb-4dead1896d6b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.392298 4876 generic.go:334] "Generic (PLEG): container finished" podID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerID="5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" exitCode=0 Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.392330 4876 generic.go:334] "Generic (PLEG): container finished" podID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerID="1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" exitCode=143 Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.392379 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.392383 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerDied","Data":"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.395435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerDied","Data":"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.395450 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3e07f1f-638c-44ad-90cb-4dead1896d6b","Type":"ContainerDied","Data":"ea82ab4510d54ed774c6dbd50f25c729dca3ad58eb34127090b37067b68b2ca3"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.395466 4876 scope.go:117] "RemoveContainer" containerID="5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402477 4876 generic.go:334] "Generic (PLEG): container finished" podID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerID="086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" exitCode=0 Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402506 4876 generic.go:334] "Generic (PLEG): container finished" podID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerID="acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" exitCode=143 Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402543 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerDied","Data":"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402617 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerDied","Data":"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.402629 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ad4818eb-206d-4afe-90a0-ea69e74fd1fb","Type":"ContainerDied","Data":"2429862062c0382ab660cadf8e338a62d4960e058e626a8c5d4add807217e6aa"} Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.421812 4876 scope.go:117] "RemoveContainer" containerID="1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.497338 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3e07f1f-638c-44ad-90cb-4dead1896d6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.498813 4876 scope.go:117] "RemoveContainer" containerID="5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.513737 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f\": container with ID starting with 5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f not found: ID does not exist" containerID="5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.513776 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f"} err="failed to get container status \"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f\": rpc error: code = NotFound desc = could not find container \"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f\": container with ID starting with 5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.513802 4876 scope.go:117] "RemoveContainer" containerID="1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.514384 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299\": container with ID starting with 1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299 not found: ID does not exist" containerID="1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.514416 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299"} err="failed to get container status \"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299\": rpc error: code = NotFound desc = could not find container \"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299\": container with ID starting with 1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299 not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.514443 4876 scope.go:117] "RemoveContainer" containerID="5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.519147 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f"} err="failed to get container status \"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f\": rpc error: code = NotFound desc = could not find container \"5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f\": container with ID starting with 5fc7024329f153b0d9a408bcc775710e2ee2f464c0a22917eeaf6da79706080f not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.519198 4876 scope.go:117] "RemoveContainer" containerID="1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.521500 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299"} err="failed to get container status \"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299\": rpc error: code = NotFound desc = could not find container \"1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299\": container with ID starting with 
1e6637967fc0ebbe118ab214832225f1fee4d12a64785e35a914c9472ec43299 not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.521545 4876 scope.go:117] "RemoveContainer" containerID="086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.525263 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.550329 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.561354 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.564570 4876 scope.go:117] "RemoveContainer" containerID="acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.570349 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.578887 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.579349 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-log" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579370 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-log" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.579389 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-api" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579399 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-api" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.579421 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-log" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579430 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-log" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.579444 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-metadata" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579452 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-metadata" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.579463 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f173f1f0-710b-49df-b7d4-41407cc721ee" containerName="nova-manage" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579471 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f173f1f0-710b-49df-b7d4-41407cc721ee" containerName="nova-manage" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579659 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-log" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579680 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" 
containerName="nova-api-log" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579695 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" containerName="nova-metadata-metadata" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579703 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f173f1f0-710b-49df-b7d4-41407cc721ee" containerName="nova-manage" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.579716 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" containerName="nova-api-api" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.584365 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.587613 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.590702 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.592913 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.595348 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.596992 4876 scope.go:117] "RemoveContainer" containerID="086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.599025 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635\": container with ID starting with 086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635 not found: ID does not exist" containerID="086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599067 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635"} err="failed to get container status \"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635\": rpc error: code = NotFound desc = could not find container \"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635\": container with ID starting with 086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635 not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599095 4876 scope.go:117] "RemoveContainer" containerID="acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" Dec 15 08:52:40 crc kubenswrapper[4876]: E1215 08:52:40.599385 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca\": container with ID starting with acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca not found: ID does not exist" containerID="acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599411 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca"} err="failed to get container status \"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca\": rpc error: code = NotFound desc = could not find container \"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca\": container with ID starting with acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599427 4876 scope.go:117] "RemoveContainer" containerID="086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599755 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635"} err="failed to get container status \"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635\": rpc error: code = NotFound desc = could not find container \"086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635\": container with ID starting with 086359705b41cc7735ae0d994a4e335ab133d09efc40aec2abb0596b9b157635 not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.599869 4876 scope.go:117] "RemoveContainer" containerID="acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.600154 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca"} err="failed to get container status \"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca\": rpc error: code = NotFound desc = could not find container \"acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca\": container with ID starting with acecfadd2a84a3cfd3ef1a575da7799b7caefe21c94669eb16bfa618ae1eabca not found: ID does not exist" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.604658 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.620121 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700008 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700069 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700133 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700171 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg7m4\" (UniqueName: \"kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700243 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700269 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700290 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.700332 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4g7r\" (UniqueName: \"kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.716312 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad4818eb-206d-4afe-90a0-ea69e74fd1fb" path="/var/lib/kubelet/pods/ad4818eb-206d-4afe-90a0-ea69e74fd1fb/volumes" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.717063 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e07f1f-638c-44ad-90cb-4dead1896d6b" path="/var/lib/kubelet/pods/f3e07f1f-638c-44ad-90cb-4dead1896d6b/volumes" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.805743 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.805830 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.805897 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.805937 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg7m4\" (UniqueName: 
\"kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.805997 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.806021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.806044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.806120 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4g7r\" (UniqueName: \"kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.806823 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.807834 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.810770 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.810873 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.811086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.812555 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle\") 
pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.825585 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4g7r\" (UniqueName: \"kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r\") pod \"nova-api-0\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.825947 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg7m4\" (UniqueName: \"kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4\") pod \"nova-metadata-0\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.919412 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.920389 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.958505 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.975809 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:40 crc kubenswrapper[4876]: I1215 08:52:40.990272 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.074719 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.074985 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="dnsmasq-dns" containerID="cri-o://301c95455c770ee1cfa8b72687e27dd45a185fce8720955281cfac2dccb27071" gracePeriod=10 Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.427357 4876 generic.go:334] "Generic (PLEG): container finished" podID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerID="301c95455c770ee1cfa8b72687e27dd45a185fce8720955281cfac2dccb27071" exitCode=0 Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.427695 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" event={"ID":"7b351b28-6a7f-4224-9dbc-6585daf22d31","Type":"ContainerDied","Data":"301c95455c770ee1cfa8b72687e27dd45a185fce8720955281cfac2dccb27071"} Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.451655 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.463330 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:41 crc kubenswrapper[4876]: W1215 08:52:41.467384 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod012bf717_5e17_4143_89f4_bf2f57726453.slice/crio-00925a51cbe8fd34c51b7e0a2a2e15dacf81ba9f3a153543bb21817862e59170 WatchSource:0}: Error finding container 00925a51cbe8fd34c51b7e0a2a2e15dacf81ba9f3a153543bb21817862e59170: Status 404 returned 
error can't find the container with id 00925a51cbe8fd34c51b7e0a2a2e15dacf81ba9f3a153543bb21817862e59170 Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.602445 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.606713 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.627939 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config\") pod \"7b351b28-6a7f-4224-9dbc-6585daf22d31\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.628009 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb\") pod \"7b351b28-6a7f-4224-9dbc-6585daf22d31\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.628044 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc\") pod \"7b351b28-6a7f-4224-9dbc-6585daf22d31\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.628118 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sshwr\" (UniqueName: \"kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr\") pod \"7b351b28-6a7f-4224-9dbc-6585daf22d31\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.628210 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb\") pod \"7b351b28-6a7f-4224-9dbc-6585daf22d31\" (UID: \"7b351b28-6a7f-4224-9dbc-6585daf22d31\") " Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.639788 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr" (OuterVolumeSpecName: "kube-api-access-sshwr") pod "7b351b28-6a7f-4224-9dbc-6585daf22d31" (UID: "7b351b28-6a7f-4224-9dbc-6585daf22d31"). InnerVolumeSpecName "kube-api-access-sshwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.731057 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sshwr\" (UniqueName: \"kubernetes.io/projected/7b351b28-6a7f-4224-9dbc-6585daf22d31-kube-api-access-sshwr\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.773142 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config" (OuterVolumeSpecName: "config") pod "7b351b28-6a7f-4224-9dbc-6585daf22d31" (UID: "7b351b28-6a7f-4224-9dbc-6585daf22d31"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.773161 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7b351b28-6a7f-4224-9dbc-6585daf22d31" (UID: "7b351b28-6a7f-4224-9dbc-6585daf22d31"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.773153 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7b351b28-6a7f-4224-9dbc-6585daf22d31" (UID: "7b351b28-6a7f-4224-9dbc-6585daf22d31"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.778046 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7b351b28-6a7f-4224-9dbc-6585daf22d31" (UID: "7b351b28-6a7f-4224-9dbc-6585daf22d31"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.833396 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.833444 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.833454 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:41 crc kubenswrapper[4876]: I1215 08:52:41.833462 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7b351b28-6a7f-4224-9dbc-6585daf22d31-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.248380 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.345792 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data\") pod \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.345914 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle\") pod \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.346072 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6hrj\" (UniqueName: \"kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj\") pod \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\" (UID: \"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70\") " Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.349965 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj" (OuterVolumeSpecName: "kube-api-access-t6hrj") pod "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" (UID: "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70"). InnerVolumeSpecName "kube-api-access-t6hrj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.379059 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" (UID: "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.379265 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data" (OuterVolumeSpecName: "config-data") pod "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" (UID: "e3bb7e95-5f27-46d4-8293-76e0e6b6ba70"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.442672 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerStarted","Data":"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.442727 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerStarted","Data":"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.442740 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerStarted","Data":"00925a51cbe8fd34c51b7e0a2a2e15dacf81ba9f3a153543bb21817862e59170"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.446618 4876 generic.go:334] "Generic (PLEG): container finished" podID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" containerID="f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424" exitCode=0 Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.446683 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70","Type":"ContainerDied","Data":"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.446714 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e3bb7e95-5f27-46d4-8293-76e0e6b6ba70","Type":"ContainerDied","Data":"0633ad0cf7208bc6d54c0af3ace81224055e0ddd4b0056fddbe38f1c6ecffc80"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.446733 4876 scope.go:117] "RemoveContainer" containerID="f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.446788 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.447990 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.448013 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6hrj\" (UniqueName: \"kubernetes.io/projected/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-kube-api-access-t6hrj\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.448027 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.457139 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" event={"ID":"7b351b28-6a7f-4224-9dbc-6585daf22d31","Type":"ContainerDied","Data":"6e5aaeb19696be814fd5ea24bb611412e4600b0dabb9a612bc6ee097a755e2b4"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.457262 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5887476b87-vvqpk" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.464906 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerStarted","Data":"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.464952 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerStarted","Data":"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.464964 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerStarted","Data":"c4dea2ec35033be4c68ed5d35f8fe7f1e156cc4314cf2dbb7fb8490e254367ca"} Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.473573 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.47355384 podStartE2EDuration="2.47355384s" podCreationTimestamp="2025-12-15 08:52:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:42.459806231 +0000 UTC m=+7288.030949162" watchObservedRunningTime="2025-12-15 08:52:42.47355384 +0000 UTC m=+7288.044696771" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.476432 4876 scope.go:117] "RemoveContainer" containerID="f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424" Dec 15 08:52:42 crc kubenswrapper[4876]: E1215 08:52:42.477020 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424\": container with ID starting with f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424 not found: ID does not exist" containerID="f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.477086 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424"} err="failed to get container status \"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424\": rpc error: code = NotFound desc = could not find container \"f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424\": container with ID starting with f7b0f73a95d977a29cd33f370749abdc54949a3e789a1b8959ff658c9631f424 not found: ID does not exist" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.477139 4876 scope.go:117] "RemoveContainer" containerID="301c95455c770ee1cfa8b72687e27dd45a185fce8720955281cfac2dccb27071" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.491388 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.491361148 podStartE2EDuration="2.491361148s" podCreationTimestamp="2025-12-15 08:52:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:42.488724757 +0000 UTC m=+7288.059867668" watchObservedRunningTime="2025-12-15 08:52:42.491361148 +0000 UTC m=+7288.062504059" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.499599 4876 
scope.go:117] "RemoveContainer" containerID="1dabd4e530120f244dc086f9170e447bf92e7ab9a5891ed18dd0d34e234ef2ee" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.523577 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.540739 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5887476b87-vvqpk"] Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.558687 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.571265 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.602791 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:42 crc kubenswrapper[4876]: E1215 08:52:42.603323 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="dnsmasq-dns" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.603340 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="dnsmasq-dns" Dec 15 08:52:42 crc kubenswrapper[4876]: E1215 08:52:42.603386 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="init" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.603397 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="init" Dec 15 08:52:42 crc kubenswrapper[4876]: E1215 08:52:42.603410 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" containerName="nova-scheduler-scheduler" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.603419 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" containerName="nova-scheduler-scheduler" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.603611 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" containerName="dnsmasq-dns" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.603636 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" containerName="nova-scheduler-scheduler" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.604477 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.606014 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.609613 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.651255 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.651307 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjkmg\" (UniqueName: \"kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.651342 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.715880 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b351b28-6a7f-4224-9dbc-6585daf22d31" path="/var/lib/kubelet/pods/7b351b28-6a7f-4224-9dbc-6585daf22d31/volumes" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.716517 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3bb7e95-5f27-46d4-8293-76e0e6b6ba70" path="/var/lib/kubelet/pods/e3bb7e95-5f27-46d4-8293-76e0e6b6ba70/volumes" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.753238 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.753692 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjkmg\" (UniqueName: \"kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.753726 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.757628 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: 
I1215 08:52:42.758378 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.771122 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjkmg\" (UniqueName: \"kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg\") pod \"nova-scheduler-0\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " pod="openstack/nova-scheduler-0" Dec 15 08:52:42 crc kubenswrapper[4876]: I1215 08:52:42.928344 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:43 crc kubenswrapper[4876]: I1215 08:52:43.271741 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:43 crc kubenswrapper[4876]: I1215 08:52:43.476595 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"af9d5df8-f86b-49f9-8c9e-db50111705e2","Type":"ContainerStarted","Data":"f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18"} Dec 15 08:52:43 crc kubenswrapper[4876]: I1215 08:52:43.476657 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"af9d5df8-f86b-49f9-8c9e-db50111705e2","Type":"ContainerStarted","Data":"06ee841439126e8ddab4ad364080d7a116c094fbd95572f0f2de9f9528997095"} Dec 15 08:52:43 crc kubenswrapper[4876]: I1215 08:52:43.499706 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.499680105 podStartE2EDuration="1.499680105s" podCreationTimestamp="2025-12-15 08:52:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:43.499005768 +0000 UTC m=+7289.070148679" watchObservedRunningTime="2025-12-15 08:52:43.499680105 +0000 UTC m=+7289.070823036" Dec 15 08:52:45 crc kubenswrapper[4876]: I1215 08:52:45.919985 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:52:45 crc kubenswrapper[4876]: I1215 08:52:45.921624 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:52:47 crc kubenswrapper[4876]: I1215 08:52:47.928712 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 08:52:48 crc kubenswrapper[4876]: I1215 08:52:48.751690 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.199229 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-trpzc"] Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.200582 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.203033 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.203409 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.224843 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-trpzc"] Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.403581 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.403889 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.403916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bfzm\" (UniqueName: \"kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.403945 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.505243 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.505327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.505347 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bfzm\" (UniqueName: \"kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.505379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.510445 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.511296 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.511654 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.530193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bfzm\" (UniqueName: \"kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm\") pod \"nova-cell1-cell-mapping-trpzc\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.535326 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:49 crc kubenswrapper[4876]: I1215 08:52:49.727975 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:52:49 crc kubenswrapper[4876]: E1215 08:52:49.728522 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.012258 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-trpzc"] Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.541481 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-trpzc" event={"ID":"8d8692dd-d36c-4f3a-83c5-c3380ae1677c","Type":"ContainerStarted","Data":"92a807f7e05a3a548421fd9d1f222d78f5b70f8c29b41cdd696f20c11db4adf0"} Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.541856 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-trpzc" event={"ID":"8d8692dd-d36c-4f3a-83c5-c3380ae1677c","Type":"ContainerStarted","Data":"2eaa7f49d448f8a28dcc2ea807db1a69e4693b6f7cd084e865c12faca1392409"} Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.567169 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-trpzc" podStartSLOduration=1.567136243 podStartE2EDuration="1.567136243s" podCreationTimestamp="2025-12-15 08:52:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:52:50.555600513 +0000 UTC m=+7296.126743424" watchObservedRunningTime="2025-12-15 08:52:50.567136243 +0000 UTC m=+7296.138279164" Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.919677 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.919893 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.921146 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:52:50 crc kubenswrapper[4876]: I1215 08:52:50.921176 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:52:52 crc kubenswrapper[4876]: I1215 08:52:52.085418 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.78:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:52:52 crc kubenswrapper[4876]: I1215 08:52:52.085424 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:52:52 crc 
kubenswrapper[4876]: I1215 08:52:52.085543 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.77:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:52:52 crc kubenswrapper[4876]: I1215 08:52:52.085671 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.78:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:52:52 crc kubenswrapper[4876]: I1215 08:52:52.929116 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 08:52:52 crc kubenswrapper[4876]: I1215 08:52:52.954954 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 08:52:53 crc kubenswrapper[4876]: I1215 08:52:53.590932 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 08:52:55 crc kubenswrapper[4876]: I1215 08:52:55.582460 4876 generic.go:334] "Generic (PLEG): container finished" podID="8d8692dd-d36c-4f3a-83c5-c3380ae1677c" containerID="92a807f7e05a3a548421fd9d1f222d78f5b70f8c29b41cdd696f20c11db4adf0" exitCode=0 Dec 15 08:52:55 crc kubenswrapper[4876]: I1215 08:52:55.582499 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-trpzc" event={"ID":"8d8692dd-d36c-4f3a-83c5-c3380ae1677c","Type":"ContainerDied","Data":"92a807f7e05a3a548421fd9d1f222d78f5b70f8c29b41cdd696f20c11db4adf0"} Dec 15 08:52:56 crc kubenswrapper[4876]: I1215 08:52:56.967792 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.167415 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bfzm\" (UniqueName: \"kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm\") pod \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.167611 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle\") pod \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.167651 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data\") pod \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.167725 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts\") pod \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\" (UID: \"8d8692dd-d36c-4f3a-83c5-c3380ae1677c\") " Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.173360 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts" (OuterVolumeSpecName: "scripts") pod "8d8692dd-d36c-4f3a-83c5-c3380ae1677c" (UID: "8d8692dd-d36c-4f3a-83c5-c3380ae1677c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.173439 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm" (OuterVolumeSpecName: "kube-api-access-5bfzm") pod "8d8692dd-d36c-4f3a-83c5-c3380ae1677c" (UID: "8d8692dd-d36c-4f3a-83c5-c3380ae1677c"). InnerVolumeSpecName "kube-api-access-5bfzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.191900 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data" (OuterVolumeSpecName: "config-data") pod "8d8692dd-d36c-4f3a-83c5-c3380ae1677c" (UID: "8d8692dd-d36c-4f3a-83c5-c3380ae1677c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.197414 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d8692dd-d36c-4f3a-83c5-c3380ae1677c" (UID: "8d8692dd-d36c-4f3a-83c5-c3380ae1677c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.269878 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bfzm\" (UniqueName: \"kubernetes.io/projected/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-kube-api-access-5bfzm\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.269912 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.269921 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.269929 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d8692dd-d36c-4f3a-83c5-c3380ae1677c-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.609388 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-trpzc" event={"ID":"8d8692dd-d36c-4f3a-83c5-c3380ae1677c","Type":"ContainerDied","Data":"2eaa7f49d448f8a28dcc2ea807db1a69e4693b6f7cd084e865c12faca1392409"} Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.609440 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eaa7f49d448f8a28dcc2ea807db1a69e4693b6f7cd084e865c12faca1392409" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.609547 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-trpzc" Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.717506 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.717785 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerName="nova-scheduler-scheduler" containerID="cri-o://f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" gracePeriod=30 Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.726466 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.727261 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-log" containerID="cri-o://4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d" gracePeriod=30 Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.727369 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-api" containerID="cri-o://6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b" gracePeriod=30 Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.735320 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.735532 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="012bf717-5e17-4143-89f4-bf2f57726453" 
containerName="nova-metadata-log" containerID="cri-o://2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e" gracePeriod=30 Dec 15 08:52:57 crc kubenswrapper[4876]: I1215 08:52:57.735637 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-metadata" containerID="cri-o://02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5" gracePeriod=30 Dec 15 08:52:57 crc kubenswrapper[4876]: E1215 08:52:57.931037 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 08:52:57 crc kubenswrapper[4876]: E1215 08:52:57.932680 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 08:52:57 crc kubenswrapper[4876]: E1215 08:52:57.933946 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 08:52:57 crc kubenswrapper[4876]: E1215 08:52:57.933983 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerName="nova-scheduler-scheduler" Dec 15 08:52:58 crc kubenswrapper[4876]: I1215 08:52:58.620155 4876 generic.go:334] "Generic (PLEG): container finished" podID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerID="4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d" exitCode=143 Dec 15 08:52:58 crc kubenswrapper[4876]: I1215 08:52:58.620209 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerDied","Data":"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d"} Dec 15 08:52:58 crc kubenswrapper[4876]: I1215 08:52:58.622880 4876 generic.go:334] "Generic (PLEG): container finished" podID="012bf717-5e17-4143-89f4-bf2f57726453" containerID="2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e" exitCode=143 Dec 15 08:52:58 crc kubenswrapper[4876]: I1215 08:52:58.622929 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerDied","Data":"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e"} Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.637311 4876 generic.go:334] "Generic (PLEG): container finished" podID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerID="f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" exitCode=0 Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.637396 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-scheduler-0" event={"ID":"af9d5df8-f86b-49f9-8c9e-db50111705e2","Type":"ContainerDied","Data":"f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18"} Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.860386 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.912511 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle\") pod \"af9d5df8-f86b-49f9-8c9e-db50111705e2\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.912630 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjkmg\" (UniqueName: \"kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg\") pod \"af9d5df8-f86b-49f9-8c9e-db50111705e2\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.912737 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data\") pod \"af9d5df8-f86b-49f9-8c9e-db50111705e2\" (UID: \"af9d5df8-f86b-49f9-8c9e-db50111705e2\") " Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.919076 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg" (OuterVolumeSpecName: "kube-api-access-xjkmg") pod "af9d5df8-f86b-49f9-8c9e-db50111705e2" (UID: "af9d5df8-f86b-49f9-8c9e-db50111705e2"). InnerVolumeSpecName "kube-api-access-xjkmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.939692 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data" (OuterVolumeSpecName: "config-data") pod "af9d5df8-f86b-49f9-8c9e-db50111705e2" (UID: "af9d5df8-f86b-49f9-8c9e-db50111705e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:52:59 crc kubenswrapper[4876]: I1215 08:52:59.941487 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af9d5df8-f86b-49f9-8c9e-db50111705e2" (UID: "af9d5df8-f86b-49f9-8c9e-db50111705e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.013935 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.013973 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af9d5df8-f86b-49f9-8c9e-db50111705e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.013985 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjkmg\" (UniqueName: \"kubernetes.io/projected/af9d5df8-f86b-49f9-8c9e-db50111705e2-kube-api-access-xjkmg\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.646670 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"af9d5df8-f86b-49f9-8c9e-db50111705e2","Type":"ContainerDied","Data":"06ee841439126e8ddab4ad364080d7a116c094fbd95572f0f2de9f9528997095"} Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.646717 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.647565 4876 scope.go:117] "RemoveContainer" containerID="f9a8db7db5b98843051739897d6b01a68c4b730c5eecccd72e2a4ff74276dc18" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.689557 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.697635 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.715333 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" path="/var/lib/kubelet/pods/af9d5df8-f86b-49f9-8c9e-db50111705e2/volumes" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.716861 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:53:00 crc kubenswrapper[4876]: E1215 08:53:00.717358 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d8692dd-d36c-4f3a-83c5-c3380ae1677c" containerName="nova-manage" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.717375 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d8692dd-d36c-4f3a-83c5-c3380ae1677c" containerName="nova-manage" Dec 15 08:53:00 crc kubenswrapper[4876]: E1215 08:53:00.717421 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerName="nova-scheduler-scheduler" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.717429 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerName="nova-scheduler-scheduler" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.717635 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d8692dd-d36c-4f3a-83c5-c3380ae1677c" containerName="nova-manage" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.717659 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="af9d5df8-f86b-49f9-8c9e-db50111705e2" containerName="nova-scheduler-scheduler" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.718409 4876 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.724554 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.735359 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.843559 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fvlm\" (UniqueName: \"kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.843654 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.843742 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.945821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.945942 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fvlm\" (UniqueName: \"kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.945997 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.952279 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.970248 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:00 crc kubenswrapper[4876]: I1215 08:53:00.983804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fvlm\" (UniqueName: 
\"kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm\") pod \"nova-scheduler-0\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " pod="openstack/nova-scheduler-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.063697 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.351235 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.361083 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.453531 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4g7r\" (UniqueName: \"kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r\") pod \"39692525-cc0a-49c8-b3b6-d1003f02d768\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.454556 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data\") pod \"39692525-cc0a-49c8-b3b6-d1003f02d768\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.454711 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle\") pod \"39692525-cc0a-49c8-b3b6-d1003f02d768\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.454783 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs\") pod \"39692525-cc0a-49c8-b3b6-d1003f02d768\" (UID: \"39692525-cc0a-49c8-b3b6-d1003f02d768\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.455580 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs" (OuterVolumeSpecName: "logs") pod "39692525-cc0a-49c8-b3b6-d1003f02d768" (UID: "39692525-cc0a-49c8-b3b6-d1003f02d768"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.457433 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r" (OuterVolumeSpecName: "kube-api-access-w4g7r") pod "39692525-cc0a-49c8-b3b6-d1003f02d768" (UID: "39692525-cc0a-49c8-b3b6-d1003f02d768"). InnerVolumeSpecName "kube-api-access-w4g7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.476680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data" (OuterVolumeSpecName: "config-data") pod "39692525-cc0a-49c8-b3b6-d1003f02d768" (UID: "39692525-cc0a-49c8-b3b6-d1003f02d768"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.477354 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39692525-cc0a-49c8-b3b6-d1003f02d768" (UID: "39692525-cc0a-49c8-b3b6-d1003f02d768"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.556664 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs\") pod \"012bf717-5e17-4143-89f4-bf2f57726453\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.557044 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle\") pod \"012bf717-5e17-4143-89f4-bf2f57726453\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.557275 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data\") pod \"012bf717-5e17-4143-89f4-bf2f57726453\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.557406 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg7m4\" (UniqueName: \"kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4\") pod \"012bf717-5e17-4143-89f4-bf2f57726453\" (UID: \"012bf717-5e17-4143-89f4-bf2f57726453\") " Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.557512 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs" (OuterVolumeSpecName: "logs") pod "012bf717-5e17-4143-89f4-bf2f57726453" (UID: "012bf717-5e17-4143-89f4-bf2f57726453"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.558258 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.558372 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39692525-cc0a-49c8-b3b6-d1003f02d768-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.558454 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/39692525-cc0a-49c8-b3b6-d1003f02d768-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.558533 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/012bf717-5e17-4143-89f4-bf2f57726453-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.558597 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4g7r\" (UniqueName: \"kubernetes.io/projected/39692525-cc0a-49c8-b3b6-d1003f02d768-kube-api-access-w4g7r\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.561055 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4" (OuterVolumeSpecName: "kube-api-access-mg7m4") pod "012bf717-5e17-4143-89f4-bf2f57726453" (UID: "012bf717-5e17-4143-89f4-bf2f57726453"). InnerVolumeSpecName "kube-api-access-mg7m4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.579081 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "012bf717-5e17-4143-89f4-bf2f57726453" (UID: "012bf717-5e17-4143-89f4-bf2f57726453"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.580421 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data" (OuterVolumeSpecName: "config-data") pod "012bf717-5e17-4143-89f4-bf2f57726453" (UID: "012bf717-5e17-4143-89f4-bf2f57726453"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.597514 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: W1215 08:53:01.597690 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd54c160_5bef_46b5_9485_e042399e3330.slice/crio-630218cdf8dd0bf04d0fd8e5acaa97239577fe479da26bef57d17c5a2c2ef845 WatchSource:0}: Error finding container 630218cdf8dd0bf04d0fd8e5acaa97239577fe479da26bef57d17c5a2c2ef845: Status 404 returned error can't find the container with id 630218cdf8dd0bf04d0fd8e5acaa97239577fe479da26bef57d17c5a2c2ef845 Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.659962 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.659997 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/012bf717-5e17-4143-89f4-bf2f57726453-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.660007 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg7m4\" (UniqueName: \"kubernetes.io/projected/012bf717-5e17-4143-89f4-bf2f57726453-kube-api-access-mg7m4\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.661179 4876 generic.go:334] "Generic (PLEG): container finished" podID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerID="6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b" exitCode=0 Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.661244 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.661246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerDied","Data":"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b"} Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.661346 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"39692525-cc0a-49c8-b3b6-d1003f02d768","Type":"ContainerDied","Data":"c4dea2ec35033be4c68ed5d35f8fe7f1e156cc4314cf2dbb7fb8490e254367ca"} Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.661366 4876 scope.go:117] "RemoveContainer" containerID="6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.664220 4876 generic.go:334] "Generic (PLEG): container finished" podID="012bf717-5e17-4143-89f4-bf2f57726453" containerID="02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5" exitCode=0 Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.664279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerDied","Data":"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5"} Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.664303 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"012bf717-5e17-4143-89f4-bf2f57726453","Type":"ContainerDied","Data":"00925a51cbe8fd34c51b7e0a2a2e15dacf81ba9f3a153543bb21817862e59170"} Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.664353 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.669674 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd54c160-5bef-46b5-9485-e042399e3330","Type":"ContainerStarted","Data":"630218cdf8dd0bf04d0fd8e5acaa97239577fe479da26bef57d17c5a2c2ef845"} Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.690892 4876 scope.go:117] "RemoveContainer" containerID="4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.701948 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.718212 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.722902 4876 scope.go:117] "RemoveContainer" containerID="6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.724097 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b\": container with ID starting with 6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b not found: ID does not exist" containerID="6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.724161 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b"} err="failed to get container status \"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b\": rpc error: code = NotFound desc = could not find container \"6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b\": container with ID starting with 6ea25609932b16b310c56cda11253e3ac58627709ee7a3d39ea18272bfc0821b not found: ID does not exist" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.724193 4876 scope.go:117] "RemoveContainer" containerID="4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.724528 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d\": container with ID starting with 4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d not found: ID does not exist" containerID="4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.724556 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d"} err="failed to get container status \"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d\": rpc error: code = NotFound desc = could not find container \"4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d\": container with ID starting with 4b871bcdc5fc7e4537deddc824854fcb6f52780660c78bb55f08a45c5f963f1d not found: ID does not exist" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.724575 4876 scope.go:117] "RemoveContainer" containerID="02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 
08:53:01.732467 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.732924 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-metadata" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.732944 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-metadata" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.732961 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-log" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.732968 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-log" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.732983 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-log" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.732990 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-log" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.733002 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-api" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.733009 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-api" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.733260 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-api" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.733273 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-metadata" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.733288 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="012bf717-5e17-4143-89f4-bf2f57726453" containerName="nova-metadata-log" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.733301 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" containerName="nova-api-log" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.734339 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.746038 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.746118 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.759751 4876 scope.go:117] "RemoveContainer" containerID="2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.765605 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.791492 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.802365 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.805184 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.808199 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.809373 4876 scope.go:117] "RemoveContainer" containerID="02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.809922 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5\": container with ID starting with 02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5 not found: ID does not exist" containerID="02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.809964 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5"} err="failed to get container status \"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5\": rpc error: code = NotFound desc = could not find container \"02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5\": container with ID starting with 02593aa93a0428d2d138dbef01c21a1b7778dd6bfc1a1d4515f56742762e1dd5 not found: ID does not exist" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.809991 4876 scope.go:117] "RemoveContainer" containerID="2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e" Dec 15 08:53:01 crc kubenswrapper[4876]: E1215 08:53:01.811953 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e\": container with ID starting with 2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e not found: ID does not exist" containerID="2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.812034 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e"} err="failed to get container status \"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e\": rpc error: 
code = NotFound desc = could not find container \"2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e\": container with ID starting with 2a1eb9cd108fcb3bd26ea1ca07c2eed65205ee7bf8ecb3274872ce1b7adeb68e not found: ID does not exist" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.824728 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.863811 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.864362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.864390 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcmqp\" (UniqueName: \"kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.864534 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.966671 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967037 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967063 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45dsm\" (UniqueName: \"kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967091 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967127 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-tcmqp\" (UniqueName: \"kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967170 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967193 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967262 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.967763 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.972627 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:01 crc kubenswrapper[4876]: I1215 08:53:01.974508 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.002376 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcmqp\" (UniqueName: \"kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp\") pod \"nova-metadata-0\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " pod="openstack/nova-metadata-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.069394 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.069434 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45dsm\" (UniqueName: \"kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.069470 4876 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.069488 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.070125 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.076907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.077379 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.092788 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.107464 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45dsm\" (UniqueName: \"kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm\") pod \"nova-api-0\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.141579 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.644058 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.659764 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:53:02 crc kubenswrapper[4876]: W1215 08:53:02.675745 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c78a2bb_e344_429f_8dc2_ecef01045ffe.slice/crio-bc8db74515bfda8158dce85831f7cd726ed56a0bcc3df6757b52660a31a45578 WatchSource:0}: Error finding container bc8db74515bfda8158dce85831f7cd726ed56a0bcc3df6757b52660a31a45578: Status 404 returned error can't find the container with id bc8db74515bfda8158dce85831f7cd726ed56a0bcc3df6757b52660a31a45578 Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.698186 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd54c160-5bef-46b5-9485-e042399e3330","Type":"ContainerStarted","Data":"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a"} Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.701244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerStarted","Data":"11be1b51cd260f2ca65277afd3469d136bf20edad5e32d88f176ba524afa2965"} Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.721328 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="012bf717-5e17-4143-89f4-bf2f57726453" path="/var/lib/kubelet/pods/012bf717-5e17-4143-89f4-bf2f57726453/volumes" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.721646 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.721631727 podStartE2EDuration="2.721631727s" podCreationTimestamp="2025-12-15 08:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:53:02.717458745 +0000 UTC m=+7308.288601676" watchObservedRunningTime="2025-12-15 08:53:02.721631727 +0000 UTC m=+7308.292774638" Dec 15 08:53:02 crc kubenswrapper[4876]: I1215 08:53:02.722268 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39692525-cc0a-49c8-b3b6-d1003f02d768" path="/var/lib/kubelet/pods/39692525-cc0a-49c8-b3b6-d1003f02d768/volumes" Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.706321 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:53:03 crc kubenswrapper[4876]: E1215 08:53:03.706850 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.714244 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerStarted","Data":"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345"} Dec 15 08:53:03 crc 
kubenswrapper[4876]: I1215 08:53:03.714278 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerStarted","Data":"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d"} Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.714289 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerStarted","Data":"bc8db74515bfda8158dce85831f7cd726ed56a0bcc3df6757b52660a31a45578"} Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.716172 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerStarted","Data":"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39"} Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.716276 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerStarted","Data":"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6"} Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.731692 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.7316737 podStartE2EDuration="2.7316737s" podCreationTimestamp="2025-12-15 08:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:53:03.728881905 +0000 UTC m=+7309.300024826" watchObservedRunningTime="2025-12-15 08:53:03.7316737 +0000 UTC m=+7309.302816601" Dec 15 08:53:03 crc kubenswrapper[4876]: I1215 08:53:03.765552 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.76553279 podStartE2EDuration="2.76553279s" podCreationTimestamp="2025-12-15 08:53:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:53:03.757423193 +0000 UTC m=+7309.328566114" watchObservedRunningTime="2025-12-15 08:53:03.76553279 +0000 UTC m=+7309.336675701" Dec 15 08:53:06 crc kubenswrapper[4876]: I1215 08:53:06.064665 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 08:53:07 crc kubenswrapper[4876]: I1215 08:53:07.093955 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:53:07 crc kubenswrapper[4876]: I1215 08:53:07.094009 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:53:11 crc kubenswrapper[4876]: I1215 08:53:11.064833 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 08:53:11 crc kubenswrapper[4876]: I1215 08:53:11.091458 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 08:53:11 crc kubenswrapper[4876]: I1215 08:53:11.822213 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 08:53:12 crc kubenswrapper[4876]: I1215 08:53:12.094298 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:53:12 crc kubenswrapper[4876]: I1215 08:53:12.094344 4876 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:53:12 crc kubenswrapper[4876]: I1215 08:53:12.142737 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:53:12 crc kubenswrapper[4876]: I1215 08:53:12.142793 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:53:13 crc kubenswrapper[4876]: I1215 08:53:13.176400 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:53:13 crc kubenswrapper[4876]: I1215 08:53:13.176399 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.82:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:53:13 crc kubenswrapper[4876]: I1215 08:53:13.258342 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:53:13 crc kubenswrapper[4876]: I1215 08:53:13.258340 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.83:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:53:18 crc kubenswrapper[4876]: I1215 08:53:18.705485 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:53:18 crc kubenswrapper[4876]: E1215 08:53:18.706270 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.096544 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.097036 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.099835 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.100591 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.152196 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.152266 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 
15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.152752 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.152779 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.160526 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.160610 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.370855 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.372286 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.390674 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.497021 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.497076 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.497171 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.497214 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlpxk\" (UniqueName: \"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.497237 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.599900 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " 
pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.599992 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlpxk\" (UniqueName: \"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.600033 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.600128 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.600157 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.601149 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.601837 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.601907 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.602013 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.627427 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlpxk\" (UniqueName: \"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk\") pod \"dnsmasq-dns-6c6b75bdc5-4ds9k\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:22 crc kubenswrapper[4876]: I1215 08:53:22.732641 4876 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:23 crc kubenswrapper[4876]: I1215 08:53:23.227441 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:53:23 crc kubenswrapper[4876]: W1215 08:53:23.232423 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25132660_5261_408f_8134_665f6d60af9e.slice/crio-6680f0bc44514d5e145d3be56ad5e58b755722e2632bafbd2747beb39860d511 WatchSource:0}: Error finding container 6680f0bc44514d5e145d3be56ad5e58b755722e2632bafbd2747beb39860d511: Status 404 returned error can't find the container with id 6680f0bc44514d5e145d3be56ad5e58b755722e2632bafbd2747beb39860d511 Dec 15 08:53:23 crc kubenswrapper[4876]: I1215 08:53:23.894411 4876 generic.go:334] "Generic (PLEG): container finished" podID="25132660-5261-408f-8134-665f6d60af9e" containerID="fd0423da8ed140ad77dd41313d2dd3560f04ac6ed63f1ba1e0746f270e281121" exitCode=0 Dec 15 08:53:23 crc kubenswrapper[4876]: I1215 08:53:23.894469 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" event={"ID":"25132660-5261-408f-8134-665f6d60af9e","Type":"ContainerDied","Data":"fd0423da8ed140ad77dd41313d2dd3560f04ac6ed63f1ba1e0746f270e281121"} Dec 15 08:53:23 crc kubenswrapper[4876]: I1215 08:53:23.894954 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" event={"ID":"25132660-5261-408f-8134-665f6d60af9e","Type":"ContainerStarted","Data":"6680f0bc44514d5e145d3be56ad5e58b755722e2632bafbd2747beb39860d511"} Dec 15 08:53:24 crc kubenswrapper[4876]: I1215 08:53:24.912603 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" event={"ID":"25132660-5261-408f-8134-665f6d60af9e","Type":"ContainerStarted","Data":"e58b1770c521ff81ff626d86138ff7cff5c163f2aca4f095daaf6888810dadcc"} Dec 15 08:53:24 crc kubenswrapper[4876]: I1215 08:53:24.914170 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:24 crc kubenswrapper[4876]: I1215 08:53:24.931879 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" podStartSLOduration=2.931862926 podStartE2EDuration="2.931862926s" podCreationTimestamp="2025-12-15 08:53:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:53:24.931035873 +0000 UTC m=+7330.502178814" watchObservedRunningTime="2025-12-15 08:53:24.931862926 +0000 UTC m=+7330.503005837" Dec 15 08:53:29 crc kubenswrapper[4876]: I1215 08:53:29.706070 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:53:29 crc kubenswrapper[4876]: I1215 08:53:29.959176 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285"} Dec 15 08:53:32 crc kubenswrapper[4876]: I1215 08:53:32.738264 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:53:32 crc kubenswrapper[4876]: I1215 08:53:32.804863 4876 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:53:32 crc kubenswrapper[4876]: I1215 08:53:32.805179 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="dnsmasq-dns" containerID="cri-o://b97565fbc81a3654ac9590ce2325492d6e25822361092acfb1c6b81ffa47d936" gracePeriod=10 Dec 15 08:53:32 crc kubenswrapper[4876]: I1215 08:53:32.994467 4876 generic.go:334] "Generic (PLEG): container finished" podID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerID="b97565fbc81a3654ac9590ce2325492d6e25822361092acfb1c6b81ffa47d936" exitCode=0 Dec 15 08:53:32 crc kubenswrapper[4876]: I1215 08:53:32.994518 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerDied","Data":"b97565fbc81a3654ac9590ce2325492d6e25822361092acfb1c6b81ffa47d936"} Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.278634 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.404805 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb\") pod \"baae51f3-1e71-48bb-a9c3-efbc282f8132\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.404952 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb\") pod \"baae51f3-1e71-48bb-a9c3-efbc282f8132\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.404989 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc\") pod \"baae51f3-1e71-48bb-a9c3-efbc282f8132\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.405075 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8vkd\" (UniqueName: \"kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd\") pod \"baae51f3-1e71-48bb-a9c3-efbc282f8132\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.405262 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config\") pod \"baae51f3-1e71-48bb-a9c3-efbc282f8132\" (UID: \"baae51f3-1e71-48bb-a9c3-efbc282f8132\") " Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.410530 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd" (OuterVolumeSpecName: "kube-api-access-k8vkd") pod "baae51f3-1e71-48bb-a9c3-efbc282f8132" (UID: "baae51f3-1e71-48bb-a9c3-efbc282f8132"). InnerVolumeSpecName "kube-api-access-k8vkd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.447430 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "baae51f3-1e71-48bb-a9c3-efbc282f8132" (UID: "baae51f3-1e71-48bb-a9c3-efbc282f8132"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.448673 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "baae51f3-1e71-48bb-a9c3-efbc282f8132" (UID: "baae51f3-1e71-48bb-a9c3-efbc282f8132"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.451742 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "baae51f3-1e71-48bb-a9c3-efbc282f8132" (UID: "baae51f3-1e71-48bb-a9c3-efbc282f8132"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.452929 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config" (OuterVolumeSpecName: "config") pod "baae51f3-1e71-48bb-a9c3-efbc282f8132" (UID: "baae51f3-1e71-48bb-a9c3-efbc282f8132"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.507218 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.507254 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8vkd\" (UniqueName: \"kubernetes.io/projected/baae51f3-1e71-48bb-a9c3-efbc282f8132-kube-api-access-k8vkd\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.507264 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.507275 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:33 crc kubenswrapper[4876]: I1215 08:53:33.507283 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/baae51f3-1e71-48bb-a9c3-efbc282f8132-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.005080 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" event={"ID":"baae51f3-1e71-48bb-a9c3-efbc282f8132","Type":"ContainerDied","Data":"21ded807cc38d561af80f61cb48b394b8aaec814c1312b2627d28b3d85f1c4c7"} Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.005157 4876 scope.go:117] "RemoveContainer" 
containerID="b97565fbc81a3654ac9590ce2325492d6e25822361092acfb1c6b81ffa47d936" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.005186 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c57649dd5-pb6qb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.035307 4876 scope.go:117] "RemoveContainer" containerID="a1ee7d4f9dfab2f5d1f1becb07d7c6ee8d7e587b8193b9ca2c13b01a2fd3c241" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.041057 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.049858 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c57649dd5-pb6qb"] Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.520024 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-lpgkh"] Dec 15 08:53:34 crc kubenswrapper[4876]: E1215 08:53:34.520761 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="dnsmasq-dns" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.520783 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="dnsmasq-dns" Dec 15 08:53:34 crc kubenswrapper[4876]: E1215 08:53:34.520807 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="init" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.520814 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="init" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.520994 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" containerName="dnsmasq-dns" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.521701 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.528020 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lpgkh"] Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.617039 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-6125-account-create-update-zskjb"] Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.618547 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.621231 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.627142 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.627218 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6125-account-create-update-zskjb"] Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.627314 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcdvn\" (UniqueName: \"kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.717838 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="baae51f3-1e71-48bb-a9c3-efbc282f8132" path="/var/lib/kubelet/pods/baae51f3-1e71-48bb-a9c3-efbc282f8132/volumes" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.728803 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcdvn\" (UniqueName: \"kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.728856 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.728901 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c6lw\" (UniqueName: \"kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.728959 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.729632 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 
08:53:34.746590 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcdvn\" (UniqueName: \"kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn\") pod \"cinder-db-create-lpgkh\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.833200 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c6lw\" (UniqueName: \"kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.834066 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.834828 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.849222 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c6lw\" (UniqueName: \"kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw\") pod \"cinder-6125-account-create-update-zskjb\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.875527 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:34 crc kubenswrapper[4876]: I1215 08:53:34.935812 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:35 crc kubenswrapper[4876]: I1215 08:53:35.353493 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-lpgkh"] Dec 15 08:53:35 crc kubenswrapper[4876]: I1215 08:53:35.440467 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6125-account-create-update-zskjb"] Dec 15 08:53:35 crc kubenswrapper[4876]: W1215 08:53:35.441647 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44e653a9_8f82_45ab_93ac_d7484c3a7b28.slice/crio-5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f WatchSource:0}: Error finding container 5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f: Status 404 returned error can't find the container with id 5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.049816 4876 generic.go:334] "Generic (PLEG): container finished" podID="44e653a9-8f82-45ab-93ac-d7484c3a7b28" containerID="2fb89f1d9de892b7f59279efd40ad409b53c34610f3a35fd5da9c4e6ea1d1b70" exitCode=0 Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.049871 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6125-account-create-update-zskjb" event={"ID":"44e653a9-8f82-45ab-93ac-d7484c3a7b28","Type":"ContainerDied","Data":"2fb89f1d9de892b7f59279efd40ad409b53c34610f3a35fd5da9c4e6ea1d1b70"} Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.050204 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6125-account-create-update-zskjb" event={"ID":"44e653a9-8f82-45ab-93ac-d7484c3a7b28","Type":"ContainerStarted","Data":"5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f"} Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.051647 4876 generic.go:334] "Generic (PLEG): container finished" podID="b6c55609-894a-484f-88ce-6653d44bb623" containerID="730356d5aab15796a485712045094bd92d18a3cb2e8185cafe816d1e10244d6f" exitCode=0 Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.051685 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lpgkh" event={"ID":"b6c55609-894a-484f-88ce-6653d44bb623","Type":"ContainerDied","Data":"730356d5aab15796a485712045094bd92d18a3cb2e8185cafe816d1e10244d6f"} Dec 15 08:53:36 crc kubenswrapper[4876]: I1215 08:53:36.051727 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lpgkh" event={"ID":"b6c55609-894a-484f-88ce-6653d44bb623","Type":"ContainerStarted","Data":"f35b3decab7b5c4754a605316889d6d9d6576fe6771af510de4a86eb95a48d03"} Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.538838 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.621123 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.664012 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts\") pod \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.664210 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c6lw\" (UniqueName: \"kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw\") pod \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\" (UID: \"44e653a9-8f82-45ab-93ac-d7484c3a7b28\") " Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.664469 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcdvn\" (UniqueName: \"kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn\") pod \"b6c55609-894a-484f-88ce-6653d44bb623\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.664509 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts\") pod \"b6c55609-894a-484f-88ce-6653d44bb623\" (UID: \"b6c55609-894a-484f-88ce-6653d44bb623\") " Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.664876 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "44e653a9-8f82-45ab-93ac-d7484c3a7b28" (UID: "44e653a9-8f82-45ab-93ac-d7484c3a7b28"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.666129 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6c55609-894a-484f-88ce-6653d44bb623" (UID: "b6c55609-894a-484f-88ce-6653d44bb623"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.669921 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn" (OuterVolumeSpecName: "kube-api-access-rcdvn") pod "b6c55609-894a-484f-88ce-6653d44bb623" (UID: "b6c55609-894a-484f-88ce-6653d44bb623"). InnerVolumeSpecName "kube-api-access-rcdvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.670682 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw" (OuterVolumeSpecName: "kube-api-access-7c6lw") pod "44e653a9-8f82-45ab-93ac-d7484c3a7b28" (UID: "44e653a9-8f82-45ab-93ac-d7484c3a7b28"). InnerVolumeSpecName "kube-api-access-7c6lw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.767498 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcdvn\" (UniqueName: \"kubernetes.io/projected/b6c55609-894a-484f-88ce-6653d44bb623-kube-api-access-rcdvn\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.767684 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6c55609-894a-484f-88ce-6653d44bb623-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.767701 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44e653a9-8f82-45ab-93ac-d7484c3a7b28-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:37 crc kubenswrapper[4876]: I1215 08:53:37.767711 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c6lw\" (UniqueName: \"kubernetes.io/projected/44e653a9-8f82-45ab-93ac-d7484c3a7b28-kube-api-access-7c6lw\") on node \"crc\" DevicePath \"\"" Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.175967 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6125-account-create-update-zskjb" event={"ID":"44e653a9-8f82-45ab-93ac-d7484c3a7b28","Type":"ContainerDied","Data":"5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f"} Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.176021 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f6091c67224fdd6fee92bd91fd47ebb684e9ff32ed27b750e72203081fb2f6f" Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.175983 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6125-account-create-update-zskjb" Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.177877 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-lpgkh" event={"ID":"b6c55609-894a-484f-88ce-6653d44bb623","Type":"ContainerDied","Data":"f35b3decab7b5c4754a605316889d6d9d6576fe6771af510de4a86eb95a48d03"} Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.177921 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f35b3decab7b5c4754a605316889d6d9d6576fe6771af510de4a86eb95a48d03" Dec 15 08:53:38 crc kubenswrapper[4876]: I1215 08:53:38.177946 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-lpgkh" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.861611 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7v4vr"] Dec 15 08:53:39 crc kubenswrapper[4876]: E1215 08:53:39.862652 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e653a9-8f82-45ab-93ac-d7484c3a7b28" containerName="mariadb-account-create-update" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.862671 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e653a9-8f82-45ab-93ac-d7484c3a7b28" containerName="mariadb-account-create-update" Dec 15 08:53:39 crc kubenswrapper[4876]: E1215 08:53:39.862709 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6c55609-894a-484f-88ce-6653d44bb623" containerName="mariadb-database-create" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.862719 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6c55609-894a-484f-88ce-6653d44bb623" containerName="mariadb-database-create" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.862928 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6c55609-894a-484f-88ce-6653d44bb623" containerName="mariadb-database-create" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.862962 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e653a9-8f82-45ab-93ac-d7484c3a7b28" containerName="mariadb-account-create-update" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.863732 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.866365 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.866614 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.866892 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-vr2p4" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.873397 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7v4vr"] Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.905764 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.905819 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.905861 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.905923 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.906002 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56f4s\" (UniqueName: \"kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:39 crc kubenswrapper[4876]: I1215 08:53:39.906051 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007497 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007613 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007666 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007696 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56f4s\" (UniqueName: \"kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007747 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007782 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.007811 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.013804 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.013959 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.014271 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.014356 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.030452 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56f4s\" (UniqueName: \"kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s\") pod \"cinder-db-sync-7v4vr\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.183964 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.701066 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7v4vr"] Dec 15 08:53:40 crc kubenswrapper[4876]: W1215 08:53:40.717091 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9039d1f0_cf45_4e80_b0d9_230d64432bb0.slice/crio-5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d WatchSource:0}: Error finding container 5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d: Status 404 returned error can't find the container with id 5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d Dec 15 08:53:40 crc kubenswrapper[4876]: I1215 08:53:40.719962 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:53:41 crc kubenswrapper[4876]: I1215 08:53:41.204137 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7v4vr" event={"ID":"9039d1f0-cf45-4e80-b0d9-230d64432bb0","Type":"ContainerStarted","Data":"5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d"} Dec 15 08:54:00 crc kubenswrapper[4876]: I1215 08:54:00.382518 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7v4vr" event={"ID":"9039d1f0-cf45-4e80-b0d9-230d64432bb0","Type":"ContainerStarted","Data":"99fcdd2e28169f39a6fc1ed1b6f4abf1cd0cac0004e7ee1f4bc07fcaace7f127"} Dec 15 08:54:00 crc kubenswrapper[4876]: I1215 08:54:00.423166 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7v4vr" podStartSLOduration=2.632978512 podStartE2EDuration="21.4231416s" podCreationTimestamp="2025-12-15 08:53:39 +0000 UTC" firstStartedPulling="2025-12-15 08:53:40.719466273 +0000 UTC m=+7346.290609184" lastFinishedPulling="2025-12-15 08:53:59.509629351 +0000 UTC m=+7365.080772272" observedRunningTime="2025-12-15 08:54:00.419146113 +0000 UTC m=+7365.990289044" watchObservedRunningTime="2025-12-15 08:54:00.4231416 +0000 UTC m=+7365.994284531" Dec 15 08:54:03 crc kubenswrapper[4876]: I1215 08:54:03.417486 4876 generic.go:334] "Generic (PLEG): container finished" podID="9039d1f0-cf45-4e80-b0d9-230d64432bb0" containerID="99fcdd2e28169f39a6fc1ed1b6f4abf1cd0cac0004e7ee1f4bc07fcaace7f127" exitCode=0 Dec 15 08:54:03 crc kubenswrapper[4876]: I1215 08:54:03.417590 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7v4vr" event={"ID":"9039d1f0-cf45-4e80-b0d9-230d64432bb0","Type":"ContainerDied","Data":"99fcdd2e28169f39a6fc1ed1b6f4abf1cd0cac0004e7ee1f4bc07fcaace7f127"} Dec 15 08:54:04 crc kubenswrapper[4876]: I1215 08:54:04.747446 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004254 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004596 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004678 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004710 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004762 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004797 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56f4s\" (UniqueName: \"kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s\") pod \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\" (UID: \"9039d1f0-cf45-4e80-b0d9-230d64432bb0\") " Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.004814 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.005438 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9039d1f0-cf45-4e80-b0d9-230d64432bb0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.010227 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.010520 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts" (OuterVolumeSpecName: "scripts") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.011275 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s" (OuterVolumeSpecName: "kube-api-access-56f4s") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "kube-api-access-56f4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.028322 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.052356 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data" (OuterVolumeSpecName: "config-data") pod "9039d1f0-cf45-4e80-b0d9-230d64432bb0" (UID: "9039d1f0-cf45-4e80-b0d9-230d64432bb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.106701 4876 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.106740 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.106750 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56f4s\" (UniqueName: \"kubernetes.io/projected/9039d1f0-cf45-4e80-b0d9-230d64432bb0-kube-api-access-56f4s\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.106763 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.106772 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9039d1f0-cf45-4e80-b0d9-230d64432bb0-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.435585 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7v4vr" event={"ID":"9039d1f0-cf45-4e80-b0d9-230d64432bb0","Type":"ContainerDied","Data":"5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d"} Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.435636 4876 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="5d7d403efa7f45e3b8dcaed9b75bb395a316ff911d2d1485685932731d901d3d" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.435662 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7v4vr" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.762610 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 08:54:05 crc kubenswrapper[4876]: E1215 08:54:05.763216 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9039d1f0-cf45-4e80-b0d9-230d64432bb0" containerName="cinder-db-sync" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.763226 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9039d1f0-cf45-4e80-b0d9-230d64432bb0" containerName="cinder-db-sync" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.763415 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9039d1f0-cf45-4e80-b0d9-230d64432bb0" containerName="cinder-db-sync" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.770504 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.780364 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.819595 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.819749 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.819890 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.819981 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.820045 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6ff6\" (UniqueName: \"kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.897847 4876 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-api-0"] Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.899781 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.901806 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.901806 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-vr2p4" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.902097 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.902747 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.910350 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921720 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921824 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnjjm\" (UniqueName: \"kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921856 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921899 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921946 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921967 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.921985 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.922001 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.922030 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.922067 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6ff6\" (UniqueName: \"kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.922082 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.924957 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.925377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.925597 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.927629 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" 
Dec 15 08:54:05 crc kubenswrapper[4876]: I1215 08:54:05.946258 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6ff6\" (UniqueName: \"kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6\") pod \"dnsmasq-dns-9b7c74d49-6d99r\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.022826 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.022872 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.022890 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.022937 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.022993 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnjjm\" (UniqueName: \"kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.023018 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.023054 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.023158 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.023536 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 
08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.026978 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.028539 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.028712 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.035400 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.040694 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnjjm\" (UniqueName: \"kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm\") pod \"cinder-api-0\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.089775 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.214015 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.574322 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 08:54:06 crc kubenswrapper[4876]: I1215 08:54:06.738370 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:06 crc kubenswrapper[4876]: W1215 08:54:06.739254 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode75b642c_ef02_47f7_a262_ee9699fd5eb5.slice/crio-5651fecba1926a2de06b9bc9847d4ba0a7d629a37feec573f8102572ed49cf5a WatchSource:0}: Error finding container 5651fecba1926a2de06b9bc9847d4ba0a7d629a37feec573f8102572ed49cf5a: Status 404 returned error can't find the container with id 5651fecba1926a2de06b9bc9847d4ba0a7d629a37feec573f8102572ed49cf5a Dec 15 08:54:07 crc kubenswrapper[4876]: I1215 08:54:07.457375 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerStarted","Data":"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3"} Dec 15 08:54:07 crc kubenswrapper[4876]: I1215 08:54:07.458006 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerStarted","Data":"5651fecba1926a2de06b9bc9847d4ba0a7d629a37feec573f8102572ed49cf5a"} Dec 15 08:54:07 crc kubenswrapper[4876]: I1215 08:54:07.458987 4876 generic.go:334] "Generic (PLEG): container finished" podID="07694812-cd06-4758-9694-5c522ed85202" containerID="f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd" exitCode=0 Dec 15 08:54:07 crc kubenswrapper[4876]: I1215 08:54:07.459016 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" event={"ID":"07694812-cd06-4758-9694-5c522ed85202","Type":"ContainerDied","Data":"f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd"} Dec 15 08:54:07 crc kubenswrapper[4876]: I1215 08:54:07.459031 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" event={"ID":"07694812-cd06-4758-9694-5c522ed85202","Type":"ContainerStarted","Data":"8a90761af304c701aa52179de12c8f34186df5a81a5e7d6c913ebc0ce705831b"} Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.468965 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerStarted","Data":"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0"} Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.469414 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.473218 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" event={"ID":"07694812-cd06-4758-9694-5c522ed85202","Type":"ContainerStarted","Data":"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128"} Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.473450 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.484826 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.484811835 
podStartE2EDuration="3.484811835s" podCreationTimestamp="2025-12-15 08:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:08.483980013 +0000 UTC m=+7374.055122944" watchObservedRunningTime="2025-12-15 08:54:08.484811835 +0000 UTC m=+7374.055954756" Dec 15 08:54:08 crc kubenswrapper[4876]: I1215 08:54:08.514329 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" podStartSLOduration=3.514307568 podStartE2EDuration="3.514307568s" podCreationTimestamp="2025-12-15 08:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:08.508540913 +0000 UTC m=+7374.079683824" watchObservedRunningTime="2025-12-15 08:54:08.514307568 +0000 UTC m=+7374.085450469" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.303727 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.306428 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.333804 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.450450 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.450608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.450757 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6kd8\" (UniqueName: \"kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.552758 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.552883 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.552921 
4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6kd8\" (UniqueName: \"kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.553387 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.554826 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.571569 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6kd8\" (UniqueName: \"kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8\") pod \"certified-operators-5pfhr\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:13 crc kubenswrapper[4876]: I1215 08:54:13.632405 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:14 crc kubenswrapper[4876]: I1215 08:54:14.129742 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:14 crc kubenswrapper[4876]: I1215 08:54:14.524736 4876 generic.go:334] "Generic (PLEG): container finished" podID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerID="574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee" exitCode=0 Dec 15 08:54:14 crc kubenswrapper[4876]: I1215 08:54:14.524850 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerDied","Data":"574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee"} Dec 15 08:54:14 crc kubenswrapper[4876]: I1215 08:54:14.525063 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerStarted","Data":"75a3023e8e55a16847dfa91a793baac94fa24a25f4e8389ea9f678fa3c18c224"} Dec 15 08:54:15 crc kubenswrapper[4876]: I1215 08:54:15.542998 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerStarted","Data":"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d"} Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.092956 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.158656 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.158907 4876 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="dnsmasq-dns" containerID="cri-o://e58b1770c521ff81ff626d86138ff7cff5c163f2aca4f095daaf6888810dadcc" gracePeriod=10 Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.558704 4876 generic.go:334] "Generic (PLEG): container finished" podID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerID="d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d" exitCode=0 Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.559905 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerDied","Data":"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d"} Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.571945 4876 generic.go:334] "Generic (PLEG): container finished" podID="25132660-5261-408f-8134-665f6d60af9e" containerID="e58b1770c521ff81ff626d86138ff7cff5c163f2aca4f095daaf6888810dadcc" exitCode=0 Dec 15 08:54:16 crc kubenswrapper[4876]: I1215 08:54:16.572000 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" event={"ID":"25132660-5261-408f-8134-665f6d60af9e","Type":"ContainerDied","Data":"e58b1770c521ff81ff626d86138ff7cff5c163f2aca4f095daaf6888810dadcc"} Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.149778 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.322184 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb\") pod \"25132660-5261-408f-8134-665f6d60af9e\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.322234 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc\") pod \"25132660-5261-408f-8134-665f6d60af9e\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.322375 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config\") pod \"25132660-5261-408f-8134-665f6d60af9e\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.322436 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlpxk\" (UniqueName: \"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk\") pod \"25132660-5261-408f-8134-665f6d60af9e\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.322588 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb\") pod \"25132660-5261-408f-8134-665f6d60af9e\" (UID: \"25132660-5261-408f-8134-665f6d60af9e\") " Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.331637 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk" (OuterVolumeSpecName: "kube-api-access-dlpxk") pod "25132660-5261-408f-8134-665f6d60af9e" (UID: "25132660-5261-408f-8134-665f6d60af9e"). InnerVolumeSpecName "kube-api-access-dlpxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.385838 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "25132660-5261-408f-8134-665f6d60af9e" (UID: "25132660-5261-408f-8134-665f6d60af9e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.392806 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "25132660-5261-408f-8134-665f6d60af9e" (UID: "25132660-5261-408f-8134-665f6d60af9e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.393224 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config" (OuterVolumeSpecName: "config") pod "25132660-5261-408f-8134-665f6d60af9e" (UID: "25132660-5261-408f-8134-665f6d60af9e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.414865 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "25132660-5261-408f-8134-665f6d60af9e" (UID: "25132660-5261-408f-8134-665f6d60af9e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.424467 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.424497 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.424508 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.424517 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25132660-5261-408f-8134-665f6d60af9e-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.424527 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlpxk\" (UniqueName: \"kubernetes.io/projected/25132660-5261-408f-8134-665f6d60af9e-kube-api-access-dlpxk\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.544466 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.544725 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="fb55517c-d53c-40c9-9109-a5afc12665ba" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.567273 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.567511 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-log" containerID="cri-o://e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.567958 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-metadata" containerID="cri-o://7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.579195 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.579460 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="a17c448b-7ca1-4969-8809-686f22a724eb" containerName="nova-cell0-conductor-conductor" containerID="cri-o://512e54014c71727ad9498e29ec20dfd11e88c241c89b15abfb0299ec03d618be" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.587881 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" 
event={"ID":"25132660-5261-408f-8134-665f6d60af9e","Type":"ContainerDied","Data":"6680f0bc44514d5e145d3be56ad5e58b755722e2632bafbd2747beb39860d511"} Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.587926 4876 scope.go:117] "RemoveContainer" containerID="e58b1770c521ff81ff626d86138ff7cff5c163f2aca4f095daaf6888810dadcc" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.588044 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6b75bdc5-4ds9k" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.600229 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.600292 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerStarted","Data":"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee"} Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.600478 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="bd54c160-5bef-46b5-9485-e042399e3330" containerName="nova-scheduler-scheduler" containerID="cri-o://20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.610519 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.610731 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-log" containerID="cri-o://9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.610817 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-api" containerID="cri-o://f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39" gracePeriod=30 Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.642701 4876 scope.go:117] "RemoveContainer" containerID="fd0423da8ed140ad77dd41313d2dd3560f04ac6ed63f1ba1e0746f270e281121" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.749370 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5pfhr" podStartSLOduration=2.029971617 podStartE2EDuration="4.749342596s" podCreationTimestamp="2025-12-15 08:54:13 +0000 UTC" firstStartedPulling="2025-12-15 08:54:14.526830855 +0000 UTC m=+7380.097973766" lastFinishedPulling="2025-12-15 08:54:17.246201834 +0000 UTC m=+7382.817344745" observedRunningTime="2025-12-15 08:54:17.627962984 +0000 UTC m=+7383.199105895" watchObservedRunningTime="2025-12-15 08:54:17.749342596 +0000 UTC m=+7383.320485507" Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.751407 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:54:17 crc kubenswrapper[4876]: I1215 08:54:17.770137 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6b75bdc5-4ds9k"] Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.611414 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.614774 4876 generic.go:334] "Generic (PLEG): container finished" podID="fb55517c-d53c-40c9-9109-a5afc12665ba" containerID="45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0" exitCode=0 Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.614855 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb55517c-d53c-40c9-9109-a5afc12665ba","Type":"ContainerDied","Data":"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0"} Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.614888 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fb55517c-d53c-40c9-9109-a5afc12665ba","Type":"ContainerDied","Data":"44e7128ebe79321e1b209afed6fda02a9ab300e9c67f96df4bfeaf79f3d0ce3d"} Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.614911 4876 scope.go:117] "RemoveContainer" containerID="45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.620357 4876 generic.go:334] "Generic (PLEG): container finished" podID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerID="e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d" exitCode=143 Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.620453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerDied","Data":"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d"} Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.634335 4876 generic.go:334] "Generic (PLEG): container finished" podID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerID="9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6" exitCode=143 Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.634474 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerDied","Data":"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6"} Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.636821 4876 generic.go:334] "Generic (PLEG): container finished" podID="a17c448b-7ca1-4969-8809-686f22a724eb" containerID="512e54014c71727ad9498e29ec20dfd11e88c241c89b15abfb0299ec03d618be" exitCode=0 Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.637285 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a17c448b-7ca1-4969-8809-686f22a724eb","Type":"ContainerDied","Data":"512e54014c71727ad9498e29ec20dfd11e88c241c89b15abfb0299ec03d618be"} Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.729675 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.729805 4876 scope.go:117] "RemoveContainer" containerID="45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0" Dec 15 08:54:18 crc kubenswrapper[4876]: E1215 08:54:18.737961 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0\": container with ID starting with 45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0 not found: ID does not exist" containerID="45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.738012 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0"} err="failed to get container status \"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0\": rpc error: code = NotFound desc = could not find container \"45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0\": container with ID starting with 45978e01969e2073611b7377be1024b61a020c32bc8e8c8f94d624f88bddbbe0 not found: ID does not exist" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.740037 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25132660-5261-408f-8134-665f6d60af9e" path="/var/lib/kubelet/pods/25132660-5261-408f-8134-665f6d60af9e/volumes" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.756248 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data\") pod \"fb55517c-d53c-40c9-9109-a5afc12665ba\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.756307 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnxlj\" (UniqueName: \"kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj\") pod \"fb55517c-d53c-40c9-9109-a5afc12665ba\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.756483 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle\") pod \"fb55517c-d53c-40c9-9109-a5afc12665ba\" (UID: \"fb55517c-d53c-40c9-9109-a5afc12665ba\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.763471 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj" (OuterVolumeSpecName: "kube-api-access-vnxlj") pod "fb55517c-d53c-40c9-9109-a5afc12665ba" (UID: "fb55517c-d53c-40c9-9109-a5afc12665ba"). InnerVolumeSpecName "kube-api-access-vnxlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.795236 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data" (OuterVolumeSpecName: "config-data") pod "fb55517c-d53c-40c9-9109-a5afc12665ba" (UID: "fb55517c-d53c-40c9-9109-a5afc12665ba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.795810 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb55517c-d53c-40c9-9109-a5afc12665ba" (UID: "fb55517c-d53c-40c9-9109-a5afc12665ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.852773 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.857890 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data\") pod \"a17c448b-7ca1-4969-8809-686f22a724eb\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.857999 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle\") pod \"a17c448b-7ca1-4969-8809-686f22a724eb\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.858048 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m42b6\" (UniqueName: \"kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6\") pod \"a17c448b-7ca1-4969-8809-686f22a724eb\" (UID: \"a17c448b-7ca1-4969-8809-686f22a724eb\") " Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.858541 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.858568 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnxlj\" (UniqueName: \"kubernetes.io/projected/fb55517c-d53c-40c9-9109-a5afc12665ba-kube-api-access-vnxlj\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.858584 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55517c-d53c-40c9-9109-a5afc12665ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.867029 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6" (OuterVolumeSpecName: "kube-api-access-m42b6") pod "a17c448b-7ca1-4969-8809-686f22a724eb" (UID: "a17c448b-7ca1-4969-8809-686f22a724eb"). InnerVolumeSpecName "kube-api-access-m42b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.913856 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data" (OuterVolumeSpecName: "config-data") pod "a17c448b-7ca1-4969-8809-686f22a724eb" (UID: "a17c448b-7ca1-4969-8809-686f22a724eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.920310 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a17c448b-7ca1-4969-8809-686f22a724eb" (UID: "a17c448b-7ca1-4969-8809-686f22a724eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.966418 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.966455 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a17c448b-7ca1-4969-8809-686f22a724eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:18 crc kubenswrapper[4876]: I1215 08:54:18.966466 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m42b6\" (UniqueName: \"kubernetes.io/projected/a17c448b-7ca1-4969-8809-686f22a724eb-kube-api-access-m42b6\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.644792 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a17c448b-7ca1-4969-8809-686f22a724eb","Type":"ContainerDied","Data":"4bfe7a7aa38a426ba5dd6bbcaaab964062d100e2f0262b683c0173a82b9fde1a"} Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.644817 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.644853 4876 scope.go:117] "RemoveContainer" containerID="512e54014c71727ad9498e29ec20dfd11e88c241c89b15abfb0299ec03d618be" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.646698 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.692747 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.712665 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.759337 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: E1215 08:54:19.769824 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="init" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.769861 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="init" Dec 15 08:54:19 crc kubenswrapper[4876]: E1215 08:54:19.769908 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb55517c-d53c-40c9-9109-a5afc12665ba" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.769915 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb55517c-d53c-40c9-9109-a5afc12665ba" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 08:54:19 crc kubenswrapper[4876]: E1215 08:54:19.769931 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="dnsmasq-dns" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.769938 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="dnsmasq-dns" Dec 15 08:54:19 crc kubenswrapper[4876]: E1215 08:54:19.769947 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17c448b-7ca1-4969-8809-686f22a724eb" containerName="nova-cell0-conductor-conductor" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.769955 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17c448b-7ca1-4969-8809-686f22a724eb" containerName="nova-cell0-conductor-conductor" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.770235 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a17c448b-7ca1-4969-8809-686f22a724eb" containerName="nova-cell0-conductor-conductor" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.770249 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="25132660-5261-408f-8134-665f6d60af9e" containerName="dnsmasq-dns" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.770259 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb55517c-d53c-40c9-9109-a5afc12665ba" containerName="nova-cell1-novncproxy-novncproxy" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.770873 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.775249 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.793549 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.802822 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.812139 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.820724 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.822093 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.824732 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.831996 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.895296 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6dh6\" (UniqueName: \"kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.895370 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.895588 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.997820 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.997877 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.997925 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.998045 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.998078 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg4gp\" (UniqueName: \"kubernetes.io/projected/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-kube-api-access-sg4gp\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:19 crc kubenswrapper[4876]: I1215 08:54:19.998274 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6dh6\" (UniqueName: \"kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.004526 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.008605 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.030789 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6dh6\" (UniqueName: \"kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6\") pod \"nova-cell0-conductor-0\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.099415 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.099469 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg4gp\" (UniqueName: \"kubernetes.io/projected/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-kube-api-access-sg4gp\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.099554 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.099998 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.104335 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.117894 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.118713 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg4gp\" (UniqueName: \"kubernetes.io/projected/5bad51e1-7309-47a0-8d07-e3f4d9bbd104-kube-api-access-sg4gp\") pod \"nova-cell1-novncproxy-0\" (UID: \"5bad51e1-7309-47a0-8d07-e3f4d9bbd104\") " pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.162593 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.500334 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.615507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fvlm\" (UniqueName: \"kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm\") pod \"bd54c160-5bef-46b5-9485-e042399e3330\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.615618 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle\") pod \"bd54c160-5bef-46b5-9485-e042399e3330\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.615640 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data\") pod \"bd54c160-5bef-46b5-9485-e042399e3330\" (UID: \"bd54c160-5bef-46b5-9485-e042399e3330\") " Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.629370 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm" (OuterVolumeSpecName: "kube-api-access-9fvlm") pod "bd54c160-5bef-46b5-9485-e042399e3330" (UID: "bd54c160-5bef-46b5-9485-e042399e3330"). InnerVolumeSpecName "kube-api-access-9fvlm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.647744 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.653816 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data" (OuterVolumeSpecName: "config-data") pod "bd54c160-5bef-46b5-9485-e042399e3330" (UID: "bd54c160-5bef-46b5-9485-e042399e3330"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.655989 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.656460 4876 generic.go:334] "Generic (PLEG): container finished" podID="bd54c160-5bef-46b5-9485-e042399e3330" containerID="20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a" exitCode=0 Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.656491 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd54c160-5bef-46b5-9485-e042399e3330","Type":"ContainerDied","Data":"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a"} Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.656511 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bd54c160-5bef-46b5-9485-e042399e3330","Type":"ContainerDied","Data":"630218cdf8dd0bf04d0fd8e5acaa97239577fe479da26bef57d17c5a2c2ef845"} Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.656528 4876 scope.go:117] "RemoveContainer" containerID="20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.656542 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:54:20 crc kubenswrapper[4876]: W1215 08:54:20.658297 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bdbec5e_3396_41be_988d_f7883fca149a.slice/crio-bf4d927a8381dba0d7f38c473e3054be033e6efda8818be69a57bf2b651448fc WatchSource:0}: Error finding container bf4d927a8381dba0d7f38c473e3054be033e6efda8818be69a57bf2b651448fc: Status 404 returned error can't find the container with id bf4d927a8381dba0d7f38c473e3054be033e6efda8818be69a57bf2b651448fc Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.665133 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd54c160-5bef-46b5-9485-e042399e3330" (UID: "bd54c160-5bef-46b5-9485-e042399e3330"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:20 crc kubenswrapper[4876]: W1215 08:54:20.665557 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bad51e1_7309_47a0_8d07_e3f4d9bbd104.slice/crio-1a8ebdd733c846bf88b9692f582fa89dd5e12fa6f5bf79a64a04275c1a4a0c19 WatchSource:0}: Error finding container 1a8ebdd733c846bf88b9692f582fa89dd5e12fa6f5bf79a64a04275c1a4a0c19: Status 404 returned error can't find the container with id 1a8ebdd733c846bf88b9692f582fa89dd5e12fa6f5bf79a64a04275c1a4a0c19 Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.716957 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a17c448b-7ca1-4969-8809-686f22a724eb" path="/var/lib/kubelet/pods/a17c448b-7ca1-4969-8809-686f22a724eb/volumes" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.717875 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb55517c-d53c-40c9-9109-a5afc12665ba" path="/var/lib/kubelet/pods/fb55517c-d53c-40c9-9109-a5afc12665ba/volumes" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.718609 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.718753 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd54c160-5bef-46b5-9485-e042399e3330-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.718769 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fvlm\" (UniqueName: \"kubernetes.io/projected/bd54c160-5bef-46b5-9485-e042399e3330-kube-api-access-9fvlm\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.805368 4876 scope.go:117] "RemoveContainer" containerID="20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a" Dec 15 08:54:20 crc kubenswrapper[4876]: E1215 08:54:20.806794 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a\": container with ID starting with 20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a not found: ID does not exist" containerID="20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.806860 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a"} err="failed to get container status \"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a\": rpc error: code = NotFound desc = could not find container \"20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a\": container with ID starting with 20445e0a8f881f3c8f43cf1f6910e9fa849b2327988ddcabed409e7c1540a49a not found: ID does not exist" Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.876048 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:20 crc kubenswrapper[4876]: I1215 08:54:20.876276 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" 
containerName="nova-cell1-conductor-conductor" containerID="cri-o://12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" gracePeriod=30 Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.033274 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.056027 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.080356 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.080834 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd54c160-5bef-46b5-9485-e042399e3330" containerName="nova-scheduler-scheduler" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.080855 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd54c160-5bef-46b5-9485-e042399e3330" containerName="nova-scheduler-scheduler" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.081022 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd54c160-5bef-46b5-9485-e042399e3330" containerName="nova-scheduler-scheduler" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.081706 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.084162 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.093695 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.098524 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.233295 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data\") pod \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.233462 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs\") pod \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.233584 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle\") pod \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.233620 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcmqp\" (UniqueName: \"kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp\") pod \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\" (UID: \"9c78a2bb-e344-429f-8dc2-ecef01045ffe\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.233982 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.234126 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2wt6\" (UniqueName: \"kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.234161 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.235038 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs" (OuterVolumeSpecName: "logs") pod "9c78a2bb-e344-429f-8dc2-ecef01045ffe" (UID: "9c78a2bb-e344-429f-8dc2-ecef01045ffe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.247378 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp" (OuterVolumeSpecName: "kube-api-access-tcmqp") pod "9c78a2bb-e344-429f-8dc2-ecef01045ffe" (UID: "9c78a2bb-e344-429f-8dc2-ecef01045ffe"). InnerVolumeSpecName "kube-api-access-tcmqp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.291799 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c78a2bb-e344-429f-8dc2-ecef01045ffe" (UID: "9c78a2bb-e344-429f-8dc2-ecef01045ffe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.295454 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data" (OuterVolumeSpecName: "config-data") pod "9c78a2bb-e344-429f-8dc2-ecef01045ffe" (UID: "9c78a2bb-e344-429f-8dc2-ecef01045ffe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343328 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343475 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2wt6\" (UniqueName: \"kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343627 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343747 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343764 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcmqp\" (UniqueName: \"kubernetes.io/projected/9c78a2bb-e344-429f-8dc2-ecef01045ffe-kube-api-access-tcmqp\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343775 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c78a2bb-e344-429f-8dc2-ecef01045ffe-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.343784 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c78a2bb-e344-429f-8dc2-ecef01045ffe-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.347881 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.349656 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.361662 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2wt6\" (UniqueName: \"kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6\") pod \"nova-scheduler-0\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.412203 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.508686 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.648350 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle\") pod \"d149f9fe-529b-4652-b507-0e1e1821ace2\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.648448 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data\") pod \"d149f9fe-529b-4652-b507-0e1e1821ace2\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.648486 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45dsm\" (UniqueName: \"kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm\") pod \"d149f9fe-529b-4652-b507-0e1e1821ace2\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.648673 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs\") pod \"d149f9fe-529b-4652-b507-0e1e1821ace2\" (UID: \"d149f9fe-529b-4652-b507-0e1e1821ace2\") " Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.649596 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs" (OuterVolumeSpecName: "logs") pod "d149f9fe-529b-4652-b507-0e1e1821ace2" (UID: "d149f9fe-529b-4652-b507-0e1e1821ace2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.657232 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm" (OuterVolumeSpecName: "kube-api-access-45dsm") pod "d149f9fe-529b-4652-b507-0e1e1821ace2" (UID: "d149f9fe-529b-4652-b507-0e1e1821ace2"). InnerVolumeSpecName "kube-api-access-45dsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.677823 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d149f9fe-529b-4652-b507-0e1e1821ace2" (UID: "d149f9fe-529b-4652-b507-0e1e1821ace2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.680385 4876 generic.go:334] "Generic (PLEG): container finished" podID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerID="f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39" exitCode=0 Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.680459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerDied","Data":"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.680485 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d149f9fe-529b-4652-b507-0e1e1821ace2","Type":"ContainerDied","Data":"11be1b51cd260f2ca65277afd3469d136bf20edad5e32d88f176ba524afa2965"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.680501 4876 scope.go:117] "RemoveContainer" containerID="f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.680648 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.687285 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9bdbec5e-3396-41be-988d-f7883fca149a","Type":"ContainerStarted","Data":"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.687342 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9bdbec5e-3396-41be-988d-f7883fca149a","Type":"ContainerStarted","Data":"bf4d927a8381dba0d7f38c473e3054be033e6efda8818be69a57bf2b651448fc"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.687575 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.689175 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bad51e1-7309-47a0-8d07-e3f4d9bbd104","Type":"ContainerStarted","Data":"733b7f8c801c621edf374787639f877ede6910bd8ee655585db6d47764d2a759"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.689204 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"5bad51e1-7309-47a0-8d07-e3f4d9bbd104","Type":"ContainerStarted","Data":"1a8ebdd733c846bf88b9692f582fa89dd5e12fa6f5bf79a64a04275c1a4a0c19"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.697822 4876 generic.go:334] "Generic (PLEG): container finished" podID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerID="7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345" exitCode=0 Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.697866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerDied","Data":"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.697889 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9c78a2bb-e344-429f-8dc2-ecef01045ffe","Type":"ContainerDied","Data":"bc8db74515bfda8158dce85831f7cd726ed56a0bcc3df6757b52660a31a45578"} Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.697942 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.703030 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data" (OuterVolumeSpecName: "config-data") pod "d149f9fe-529b-4652-b507-0e1e1821ace2" (UID: "d149f9fe-529b-4652-b507-0e1e1821ace2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.707520 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.707498626 podStartE2EDuration="2.707498626s" podCreationTimestamp="2025-12-15 08:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:21.701157575 +0000 UTC m=+7387.272300506" watchObservedRunningTime="2025-12-15 08:54:21.707498626 +0000 UTC m=+7387.278641537" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.720867 4876 scope.go:117] "RemoveContainer" containerID="9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.735313 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.735263171 podStartE2EDuration="2.735263171s" podCreationTimestamp="2025-12-15 08:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:21.720600997 +0000 UTC m=+7387.291743928" watchObservedRunningTime="2025-12-15 08:54:21.735263171 +0000 UTC m=+7387.306406102" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.753813 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d149f9fe-529b-4652-b507-0e1e1821ace2-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.753855 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.753872 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d149f9fe-529b-4652-b507-0e1e1821ace2-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.753888 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45dsm\" (UniqueName: \"kubernetes.io/projected/d149f9fe-529b-4652-b507-0e1e1821ace2-kube-api-access-45dsm\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.755903 4876 scope.go:117] "RemoveContainer" 
containerID="f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.761074 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39\": container with ID starting with f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39 not found: ID does not exist" containerID="f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.769257 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39"} err="failed to get container status \"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39\": rpc error: code = NotFound desc = could not find container \"f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39\": container with ID starting with f5aa81c5cdf33d6131b6cd07abddc36399c9f0379900a82ea81ad7e49fbb7b39 not found: ID does not exist" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.769287 4876 scope.go:117] "RemoveContainer" containerID="9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.770477 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.770769 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6\": container with ID starting with 9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6 not found: ID does not exist" containerID="9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.770788 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6"} err="failed to get container status \"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6\": rpc error: code = NotFound desc = could not find container \"9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6\": container with ID starting with 9606021f35d48074be0885a33f02a04a4bd050ef0b5d9926f7fe3d0944a398b6 not found: ID does not exist" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.770803 4876 scope.go:117] "RemoveContainer" containerID="7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.786554 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.801169 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.801752 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-api" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.801831 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-api" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.801899 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-log" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.801949 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-log" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.802021 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-log" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802079 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-log" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.802212 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-metadata" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802274 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-metadata" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802490 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-log" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802555 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" containerName="nova-api-api" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802611 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-log" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.802677 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" containerName="nova-metadata-metadata" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.803657 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.805342 4876 scope.go:117] "RemoveContainer" containerID="e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.806465 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.815684 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.830658 4876 scope.go:117] "RemoveContainer" containerID="7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.831378 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345\": container with ID starting with 7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345 not found: ID does not exist" containerID="7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.831423 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345"} err="failed to get container status \"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345\": rpc error: code = NotFound desc = could not find container \"7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345\": container with ID starting with 7b1aadc7bc0727a04ae27537addee85bf8f93d92a50cb5ebd236f0461c465345 not found: ID does not exist" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.831451 4876 scope.go:117] "RemoveContainer" containerID="e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d" Dec 15 08:54:21 crc kubenswrapper[4876]: E1215 08:54:21.831860 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d\": container with ID starting with e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d not found: ID does not exist" containerID="e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.831932 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d"} err="failed to get container status \"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d\": rpc error: code = NotFound desc = could not find container \"e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d\": container with ID starting with e9f810e1c437db868b6e1dc27ed78759fa21cd60e31d9e529298bcc6a793b02d not found: ID does not exist" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.855562 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.855690 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-4r4sp\" (UniqueName: \"kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.855739 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.855848 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.904281 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.957269 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.957365 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r4sp\" (UniqueName: \"kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.957397 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.957436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.958912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.961793 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.962103 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data\") pod \"nova-metadata-0\" 
(UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:21 crc kubenswrapper[4876]: I1215 08:54:21.974334 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r4sp\" (UniqueName: \"kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp\") pod \"nova-metadata-0\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " pod="openstack/nova-metadata-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.043249 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.059889 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.071907 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.073890 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.076244 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.080728 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.130095 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.262710 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.263143 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.263252 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.263411 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4r66\" (UniqueName: \"kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.365779 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.365876 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.365939 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.366022 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4r66\" (UniqueName: \"kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.367050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.372130 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.375754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.388691 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4r66\" (UniqueName: \"kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66\") pod \"nova-api-0\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.397926 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.501153 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.732124 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c78a2bb-e344-429f-8dc2-ecef01045ffe" path="/var/lib/kubelet/pods/9c78a2bb-e344-429f-8dc2-ecef01045ffe/volumes" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.732961 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd54c160-5bef-46b5-9485-e042399e3330" path="/var/lib/kubelet/pods/bd54c160-5bef-46b5-9485-e042399e3330/volumes" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.733543 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d149f9fe-529b-4652-b507-0e1e1821ace2" path="/var/lib/kubelet/pods/d149f9fe-529b-4652-b507-0e1e1821ace2/volumes" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.737286 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerStarted","Data":"914958b5838f9e28f2d70ee55eb77773903a4b51d0ca09f26aa803e8b5be6890"} Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.737333 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerStarted","Data":"1f5a36f1e9ea79bfc4444e9c8cac6cc4c197558e8ba05ee3ba178cbb1d5cc635"} Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.748524 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"356d173e-c3de-410a-938a-0b96ae635b8c","Type":"ContainerStarted","Data":"a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a"} Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.748593 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"356d173e-c3de-410a-938a-0b96ae635b8c","Type":"ContainerStarted","Data":"fe49e22a065fbb232269e73254dd3654f75e781256f14db95c9a8679d48e48dc"} Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.772840 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.772818504 podStartE2EDuration="2.772818504s" podCreationTimestamp="2025-12-15 08:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:22.771395925 +0000 UTC m=+7388.342538856" watchObservedRunningTime="2025-12-15 08:54:22.772818504 +0000 UTC m=+7388.343961415" Dec 15 08:54:22 crc kubenswrapper[4876]: I1215 08:54:22.923830 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.634386 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.635356 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.708292 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:23 crc kubenswrapper[4876]: E1215 08:54:23.730456 4876 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 is running failed: container process not found" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 08:54:23 crc kubenswrapper[4876]: E1215 08:54:23.735296 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 is running failed: container process not found" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 08:54:23 crc kubenswrapper[4876]: E1215 08:54:23.739271 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 is running failed: container process not found" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 08:54:23 crc kubenswrapper[4876]: E1215 08:54:23.739388 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" containerName="nova-cell1-conductor-conductor" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.757366 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.785569 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerStarted","Data":"d581a9420fa07ef24e6a91716a26f4de8071f8016669a486c9af1f671ada4f63"} Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.785610 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerStarted","Data":"ad855c6de82bfc8717ad13eb9be5fcd8d022417a49541afbbc8b4445658bd755"} Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.788753 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerStarted","Data":"ba2ac4ece6d57c8b8ac1cc0728c8febb0724b156c6c42a505702645d7f881fab"} Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.790995 4876 generic.go:334] "Generic (PLEG): container finished" podID="ebceed8f-b110-451e-a2b3-b228414b2b5c" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" exitCode=0 Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.791183 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ebceed8f-b110-451e-a2b3-b228414b2b5c","Type":"ContainerDied","Data":"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122"} Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.791234 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ebceed8f-b110-451e-a2b3-b228414b2b5c","Type":"ContainerDied","Data":"334491fa2ae0e50454a68d2530365597b64ee6c82cd9b872d90fc3fd70332c4a"} Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.791252 4876 scope.go:117] "RemoveContainer" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.791329 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.828886 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.828862104 podStartE2EDuration="2.828862104s" podCreationTimestamp="2025-12-15 08:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:23.817484338 +0000 UTC m=+7389.388627259" watchObservedRunningTime="2025-12-15 08:54:23.828862104 +0000 UTC m=+7389.400005015" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.835313 4876 scope.go:117] "RemoveContainer" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" Dec 15 08:54:23 crc kubenswrapper[4876]: E1215 08:54:23.836019 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122\": container with ID starting with 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 not found: ID does not exist" containerID="12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.836059 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122"} err="failed to get container status \"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122\": rpc error: code = NotFound desc = could not find container \"12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122\": container with ID starting with 12fed3d37908dd0bbd3bcc5817ac962c9247bf5afc3df7a9d2a406a852450122 not found: ID does not exist" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.871215 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.899564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data\") pod \"ebceed8f-b110-451e-a2b3-b228414b2b5c\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.899637 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle\") pod \"ebceed8f-b110-451e-a2b3-b228414b2b5c\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.899722 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr58t\" (UniqueName: \"kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t\") pod \"ebceed8f-b110-451e-a2b3-b228414b2b5c\" (UID: \"ebceed8f-b110-451e-a2b3-b228414b2b5c\") " Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.906247 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t" (OuterVolumeSpecName: "kube-api-access-pr58t") pod "ebceed8f-b110-451e-a2b3-b228414b2b5c" (UID: "ebceed8f-b110-451e-a2b3-b228414b2b5c"). InnerVolumeSpecName "kube-api-access-pr58t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.923577 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data" (OuterVolumeSpecName: "config-data") pod "ebceed8f-b110-451e-a2b3-b228414b2b5c" (UID: "ebceed8f-b110-451e-a2b3-b228414b2b5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.925642 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebceed8f-b110-451e-a2b3-b228414b2b5c" (UID: "ebceed8f-b110-451e-a2b3-b228414b2b5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:23 crc kubenswrapper[4876]: I1215 08:54:23.953214 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.003761 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.003797 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebceed8f-b110-451e-a2b3-b228414b2b5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.003815 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr58t\" (UniqueName: \"kubernetes.io/projected/ebceed8f-b110-451e-a2b3-b228414b2b5c-kube-api-access-pr58t\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.135640 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.144459 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.158316 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:24 crc kubenswrapper[4876]: E1215 08:54:24.158809 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" containerName="nova-cell1-conductor-conductor" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.158834 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" containerName="nova-cell1-conductor-conductor" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.159090 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" containerName="nova-cell1-conductor-conductor" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.159923 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.164661 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.167470 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.308876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.308954 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhxnd\" (UniqueName: \"kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.309290 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.410631 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.410678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhxnd\" (UniqueName: \"kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.410764 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.414739 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.415874 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.426880 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhxnd\" (UniqueName: \"kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd\") pod \"nova-cell1-conductor-0\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.498953 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.721360 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebceed8f-b110-451e-a2b3-b228414b2b5c" path="/var/lib/kubelet/pods/ebceed8f-b110-451e-a2b3-b228414b2b5c/volumes" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.809799 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerStarted","Data":"91f38ad56e3b97487d5f76a93265af62320fc1976a4257a99903d0edf1fb863f"} Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.840640 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.840617482 podStartE2EDuration="2.840617482s" podCreationTimestamp="2025-12-15 08:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:24.824440238 +0000 UTC m=+7390.395583159" watchObservedRunningTime="2025-12-15 08:54:24.840617482 +0000 UTC m=+7390.411760423" Dec 15 08:54:24 crc kubenswrapper[4876]: I1215 08:54:24.935355 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.124505 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.163131 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.817644 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a164b6d5-b873-42ee-8633-a41c81282469","Type":"ContainerStarted","Data":"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0"} Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.817693 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a164b6d5-b873-42ee-8633-a41c81282469","Type":"ContainerStarted","Data":"0d3979f7540a0f1ea15b4dd30e20ab4765fff4483e43459992f8288a9fb95514"} Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.817808 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5pfhr" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="registry-server" containerID="cri-o://761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee" gracePeriod=2 Dec 15 08:54:25 crc kubenswrapper[4876]: I1215 08:54:25.849547 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.8495127249999999 podStartE2EDuration="1.849512725s" podCreationTimestamp="2025-12-15 08:54:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:25.848008745 +0000 UTC 
m=+7391.419151676" watchObservedRunningTime="2025-12-15 08:54:25.849512725 +0000 UTC m=+7391.420655656" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.297961 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.412741 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.449623 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content\") pod \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.449947 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6kd8\" (UniqueName: \"kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8\") pod \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.450066 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities\") pod \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\" (UID: \"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd\") " Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.450953 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities" (OuterVolumeSpecName: "utilities") pod "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" (UID: "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.455070 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8" (OuterVolumeSpecName: "kube-api-access-x6kd8") pod "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" (UID: "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd"). InnerVolumeSpecName "kube-api-access-x6kd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.516340 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" (UID: "fc32559d-ef6b-41c5-8ed1-2647a22a3fcd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.552216 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.552489 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6kd8\" (UniqueName: \"kubernetes.io/projected/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-kube-api-access-x6kd8\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.552600 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.831612 4876 generic.go:334] "Generic (PLEG): container finished" podID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerID="761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee" exitCode=0 Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.831682 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerDied","Data":"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee"} Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.832167 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pfhr" event={"ID":"fc32559d-ef6b-41c5-8ed1-2647a22a3fcd","Type":"ContainerDied","Data":"75a3023e8e55a16847dfa91a793baac94fa24a25f4e8389ea9f678fa3c18c224"} Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.831720 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5pfhr" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.832210 4876 scope.go:117] "RemoveContainer" containerID="761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.832887 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.856077 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.862273 4876 scope.go:117] "RemoveContainer" containerID="d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.867894 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5pfhr"] Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.888397 4876 scope.go:117] "RemoveContainer" containerID="574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.928818 4876 scope.go:117] "RemoveContainer" containerID="761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee" Dec 15 08:54:26 crc kubenswrapper[4876]: E1215 08:54:26.929397 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee\": container with ID starting with 761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee not found: ID does not exist" containerID="761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.929511 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee"} err="failed to get container status \"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee\": rpc error: code = NotFound desc = could not find container \"761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee\": container with ID starting with 761dda51aa3dc422612657544060690cb3a316b0458967a01ebe894ac1cae4ee not found: ID does not exist" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.929620 4876 scope.go:117] "RemoveContainer" containerID="d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d" Dec 15 08:54:26 crc kubenswrapper[4876]: E1215 08:54:26.930426 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d\": container with ID starting with d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d not found: ID does not exist" containerID="d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.930553 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d"} err="failed to get container status \"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d\": rpc error: code = NotFound desc = could not find container \"d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d\": container with ID starting with 
d852b6f82732a64ebbfbef0f51d9ea6270993cb3df035abb7820a009ae39757d not found: ID does not exist" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.930651 4876 scope.go:117] "RemoveContainer" containerID="574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee" Dec 15 08:54:26 crc kubenswrapper[4876]: E1215 08:54:26.931928 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee\": container with ID starting with 574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee not found: ID does not exist" containerID="574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee" Dec 15 08:54:26 crc kubenswrapper[4876]: I1215 08:54:26.932068 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee"} err="failed to get container status \"574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee\": rpc error: code = NotFound desc = could not find container \"574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee\": container with ID starting with 574a58e03b39046c7f0be7d2d5fa333f3215acedd0c82b23d25c0abf6b7eabee not found: ID does not exist" Dec 15 08:54:27 crc kubenswrapper[4876]: I1215 08:54:27.132275 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:54:27 crc kubenswrapper[4876]: I1215 08:54:27.133408 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 08:54:28 crc kubenswrapper[4876]: I1215 08:54:28.717571 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" path="/var/lib/kubelet/pods/fc32559d-ef6b-41c5-8ed1-2647a22a3fcd/volumes" Dec 15 08:54:30 crc kubenswrapper[4876]: I1215 08:54:30.163643 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:30 crc kubenswrapper[4876]: I1215 08:54:30.176923 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:30 crc kubenswrapper[4876]: I1215 08:54:30.890356 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 15 08:54:31 crc kubenswrapper[4876]: I1215 08:54:31.413523 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 08:54:31 crc kubenswrapper[4876]: I1215 08:54:31.439264 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 08:54:31 crc kubenswrapper[4876]: I1215 08:54:31.913759 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 08:54:32 crc kubenswrapper[4876]: I1215 08:54:32.130898 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:54:32 crc kubenswrapper[4876]: I1215 08:54:32.131346 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 08:54:32 crc kubenswrapper[4876]: I1215 08:54:32.400917 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:54:32 crc kubenswrapper[4876]: I1215 08:54:32.402158 4876 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 08:54:33 crc kubenswrapper[4876]: I1215 08:54:33.213391 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:54:33 crc kubenswrapper[4876]: I1215 08:54:33.213451 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:54:33 crc kubenswrapper[4876]: I1215 08:54:33.484348 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.95:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:54:33 crc kubenswrapper[4876]: I1215 08:54:33.484348 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.95:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 08:54:34 crc kubenswrapper[4876]: I1215 08:54:34.524102 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.883922 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:41 crc kubenswrapper[4876]: E1215 08:54:41.885056 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="extract-utilities" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.885074 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="extract-utilities" Dec 15 08:54:41 crc kubenswrapper[4876]: E1215 08:54:41.885089 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="extract-content" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.885095 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="extract-content" Dec 15 08:54:41 crc kubenswrapper[4876]: E1215 08:54:41.885146 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="registry-server" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.885160 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="registry-server" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.885352 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc32559d-ef6b-41c5-8ed1-2647a22a3fcd" containerName="registry-server" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.886440 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.891736 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 15 08:54:41 crc kubenswrapper[4876]: I1215 08:54:41.904145 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.082382 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.082535 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.082621 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.082876 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.083011 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln724\" (UniqueName: \"kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.083345 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.133274 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.134743 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.135870 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.187882 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " 
pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.188289 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.188367 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.188426 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.188455 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln724\" (UniqueName: \"kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.188530 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.191750 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.198272 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.198655 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.202828 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.207036 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle\") 
pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.214338 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln724\" (UniqueName: \"kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724\") pod \"cinder-scheduler-0\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.221919 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.409703 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.413416 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.419611 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.429795 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.740078 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.992839 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerStarted","Data":"579292a2bf1b92be7eebddd01d49f6e4d16c850222d119d4de8309e2c637910f"} Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.993476 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 08:54:42 crc kubenswrapper[4876]: I1215 08:54:42.995214 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 08:54:43 crc kubenswrapper[4876]: I1215 08:54:43.000272 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.003976 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerStarted","Data":"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd"} Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.120725 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.121003 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api-log" containerID="cri-o://41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3" gracePeriod=30 Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.121425 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api" containerID="cri-o://63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0" gracePeriod=30 Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.606746 4876 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-volume-volume1-0"] Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.612974 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.618525 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.635022 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.760893 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjgnr\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-kube-api-access-cjgnr\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.760994 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761011 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761027 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761077 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761116 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-dev\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761256 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761313 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: 
\"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-run\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761353 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761387 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761417 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761478 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761509 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761540 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-sys\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.761577 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.862713 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: 
\"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863037 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863047 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-sys\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863184 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863095 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-sys\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863329 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjgnr\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-kube-api-access-cjgnr\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863926 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863953 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863989 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864046 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 
08:54:44.864029 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.863988 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864172 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864334 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-dev\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864544 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-dev\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864657 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864718 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-run\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.864796 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-run\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865173 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: 
\"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865302 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865449 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865329 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.865542 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/1c5a822f-3b02-41d0-b70e-2254bef1ea34-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.870748 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.870831 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.871040 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.871255 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.873792 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5a822f-3b02-41d0-b70e-2254bef1ea34-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.882535 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjgnr\" (UniqueName: \"kubernetes.io/projected/1c5a822f-3b02-41d0-b70e-2254bef1ea34-kube-api-access-cjgnr\") pod \"cinder-volume-volume1-0\" (UID: \"1c5a822f-3b02-41d0-b70e-2254bef1ea34\") " pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:44 crc kubenswrapper[4876]: I1215 08:54:44.952123 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.018442 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerStarted","Data":"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c"} Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.021062 4876 generic.go:334] "Generic (PLEG): container finished" podID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerID="41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3" exitCode=143 Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.021158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerDied","Data":"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3"} Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.042036 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.7111003719999998 podStartE2EDuration="4.042016295s" podCreationTimestamp="2025-12-15 08:54:41 +0000 UTC" firstStartedPulling="2025-12-15 08:54:42.765487817 +0000 UTC m=+7408.336630728" lastFinishedPulling="2025-12-15 08:54:43.09640374 +0000 UTC m=+7408.667546651" observedRunningTime="2025-12-15 08:54:45.04033355 +0000 UTC m=+7410.611476481" watchObservedRunningTime="2025-12-15 08:54:45.042016295 +0000 UTC m=+7410.613159216" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.266277 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.268656 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.274352 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.347194 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.385980 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-sys\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386034 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386063 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data-custom\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386087 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-scripts\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386123 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-nvme\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386368 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386487 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-ceph\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386553 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96fdp\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-kube-api-access-96fdp\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386656 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386709 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386858 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-lib-modules\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386900 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-dev\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.386977 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-run\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.387047 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.387080 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.387183 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489354 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489459 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-sys\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 
08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489495 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489551 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-sys\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489495 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489614 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data-custom\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489637 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-scripts\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489657 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-nvme\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489719 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489742 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-ceph\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-nvme\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.489943 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.491091 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96fdp\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-kube-api-access-96fdp\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.491188 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492041 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492276 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-lib-modules\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492267 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492300 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-dev\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492410 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-dev\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492436 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-lib-modules\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-run\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492508 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-run\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492564 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492594 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.492708 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/883b9aed-2112-44d2-8698-3446934a0c21-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.496024 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data-custom\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.496637 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-config-data\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.497592 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-ceph\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.498085 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.510353 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96fdp\" (UniqueName: \"kubernetes.io/projected/883b9aed-2112-44d2-8698-3446934a0c21-kube-api-access-96fdp\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.531641 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/883b9aed-2112-44d2-8698-3446934a0c21-scripts\") pod \"cinder-backup-0\" (UID: \"883b9aed-2112-44d2-8698-3446934a0c21\") " pod="openstack/cinder-backup-0" Dec 15 08:54:45 crc kubenswrapper[4876]: W1215 08:54:45.565623 4876 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c5a822f_3b02_41d0_b70e_2254bef1ea34.slice/crio-192100723a49c07a64fd92623b53b509b44755108f6942d22ad69773e6c9f3f9 WatchSource:0}: Error finding container 192100723a49c07a64fd92623b53b509b44755108f6942d22ad69773e6c9f3f9: Status 404 returned error can't find the container with id 192100723a49c07a64fd92623b53b509b44755108f6942d22ad69773e6c9f3f9 Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.566516 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Dec 15 08:54:45 crc kubenswrapper[4876]: I1215 08:54:45.620276 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Dec 15 08:54:46 crc kubenswrapper[4876]: I1215 08:54:46.052088 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"1c5a822f-3b02-41d0-b70e-2254bef1ea34","Type":"ContainerStarted","Data":"192100723a49c07a64fd92623b53b509b44755108f6942d22ad69773e6c9f3f9"} Dec 15 08:54:46 crc kubenswrapper[4876]: I1215 08:54:46.287433 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Dec 15 08:54:46 crc kubenswrapper[4876]: W1215 08:54:46.293687 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod883b9aed_2112_44d2_8698_3446934a0c21.slice/crio-d75cf58895752357dae1087293f6eae91a6882e117ea04f9438b5be122d434b3 WatchSource:0}: Error finding container d75cf58895752357dae1087293f6eae91a6882e117ea04f9438b5be122d434b3: Status 404 returned error can't find the container with id d75cf58895752357dae1087293f6eae91a6882e117ea04f9438b5be122d434b3 Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.066598 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"883b9aed-2112-44d2-8698-3446934a0c21","Type":"ContainerStarted","Data":"8a2311084916f110409247305b2f0c1ccf4d84484f2665fbfd05aa945597a61e"} Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.067895 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"883b9aed-2112-44d2-8698-3446934a0c21","Type":"ContainerStarted","Data":"d75cf58895752357dae1087293f6eae91a6882e117ea04f9438b5be122d434b3"} Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.070146 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"1c5a822f-3b02-41d0-b70e-2254bef1ea34","Type":"ContainerStarted","Data":"47ed3b536f855c319591a103202cb24fe7485cb2facfd442862021bcb04bc6cc"} Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.070168 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"1c5a822f-3b02-41d0-b70e-2254bef1ea34","Type":"ContainerStarted","Data":"7527e00a4a5c78363ae5fb0ac9b39d884b00e4a3e07365eee41e4d0f2cefe6c8"} Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.107497 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.586588394 podStartE2EDuration="3.107479442s" podCreationTimestamp="2025-12-15 08:54:44 +0000 UTC" firstStartedPulling="2025-12-15 08:54:45.572649525 +0000 UTC m=+7411.143792436" lastFinishedPulling="2025-12-15 08:54:46.093540583 +0000 UTC m=+7411.664683484" observedRunningTime="2025-12-15 08:54:47.103516216 +0000 UTC m=+7412.674659127" 
watchObservedRunningTime="2025-12-15 08:54:47.107479442 +0000 UTC m=+7412.678622363" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.222984 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.278442 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.89:8776/healthcheck\": read tcp 10.217.0.2:44368->10.217.1.89:8776: read: connection reset by peer" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.751862 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.836793 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.836847 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.836872 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.836913 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnjjm\" (UniqueName: \"kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.836974 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.837173 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.837327 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data\") pod \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\" (UID: \"e75b642c-ef02-47f7-a262-ee9699fd5eb5\") " Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.837316 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.837813 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs" (OuterVolumeSpecName: "logs") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.838151 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e75b642c-ef02-47f7-a262-ee9699fd5eb5-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.838170 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e75b642c-ef02-47f7-a262-ee9699fd5eb5-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.842182 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts" (OuterVolumeSpecName: "scripts") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.845410 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.880875 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm" (OuterVolumeSpecName: "kube-api-access-gnjjm") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "kube-api-access-gnjjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.910152 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data" (OuterVolumeSpecName: "config-data") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.939763 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e75b642c-ef02-47f7-a262-ee9699fd5eb5" (UID: "e75b642c-ef02-47f7-a262-ee9699fd5eb5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.941197 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.941229 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.941239 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.941247 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnjjm\" (UniqueName: \"kubernetes.io/projected/e75b642c-ef02-47f7-a262-ee9699fd5eb5-kube-api-access-gnjjm\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:47 crc kubenswrapper[4876]: I1215 08:54:47.941256 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e75b642c-ef02-47f7-a262-ee9699fd5eb5-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.080274 4876 generic.go:334] "Generic (PLEG): container finished" podID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerID="63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0" exitCode=0 Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.080381 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerDied","Data":"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0"} Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.080408 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.080734 4876 scope.go:117] "RemoveContainer" containerID="63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.080714 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e75b642c-ef02-47f7-a262-ee9699fd5eb5","Type":"ContainerDied","Data":"5651fecba1926a2de06b9bc9847d4ba0a7d629a37feec573f8102572ed49cf5a"} Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.084411 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"883b9aed-2112-44d2-8698-3446934a0c21","Type":"ContainerStarted","Data":"4169621cedfb4495e8a06f712ab405717de59cb75dbe403a7a0fbfa13b3572e0"} Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.108480 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=2.751510628 podStartE2EDuration="3.108459081s" podCreationTimestamp="2025-12-15 08:54:45 +0000 UTC" firstStartedPulling="2025-12-15 08:54:46.296262061 +0000 UTC m=+7411.867404972" lastFinishedPulling="2025-12-15 08:54:46.653210504 +0000 UTC m=+7412.224353425" observedRunningTime="2025-12-15 08:54:48.108331048 +0000 UTC m=+7413.679473969" watchObservedRunningTime="2025-12-15 08:54:48.108459081 +0000 UTC m=+7413.679601992" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.109307 4876 scope.go:117] "RemoveContainer" containerID="41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.133165 4876 scope.go:117] "RemoveContainer" containerID="63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0" Dec 15 08:54:48 crc kubenswrapper[4876]: E1215 08:54:48.136032 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0\": container with ID starting with 63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0 not found: ID does not exist" containerID="63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.136142 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0"} err="failed to get container status \"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0\": rpc error: code = NotFound desc = could not find container \"63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0\": container with ID starting with 63b534b2e11d24993cbb8ddab1a73bc1873edbb72da30fa35297c3bc8cb441b0 not found: ID does not exist" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.136175 4876 scope.go:117] "RemoveContainer" containerID="41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3" Dec 15 08:54:48 crc kubenswrapper[4876]: E1215 08:54:48.139276 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3\": container with ID starting with 41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3 not found: ID does not exist" containerID="41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 
08:54:48.139344 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3"} err="failed to get container status \"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3\": rpc error: code = NotFound desc = could not find container \"41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3\": container with ID starting with 41d921f60886abe00d56d54e33a72a2a3ff39b1e379d2ed8b1234f2478a9fda3 not found: ID does not exist" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.148306 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.165364 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.179431 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:48 crc kubenswrapper[4876]: E1215 08:54:48.179820 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api-log" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.179839 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api-log" Dec 15 08:54:48 crc kubenswrapper[4876]: E1215 08:54:48.179855 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.179861 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.180028 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api-log" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.180047 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" containerName="cinder-api" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.181133 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.196747 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.215864 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348437 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-logs\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348508 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348558 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348580 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348618 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnb2r\" (UniqueName: \"kubernetes.io/projected/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-kube-api-access-qnb2r\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348679 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-scripts\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.348909 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.453428 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-logs\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454068 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-logs\") pod 
\"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454148 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454427 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454479 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnb2r\" (UniqueName: \"kubernetes.io/projected/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-kube-api-access-qnb2r\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454569 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-scripts\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.454636 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.455072 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.460994 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.461039 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data-custom\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.465510 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-scripts\") pod \"cinder-api-0\" (UID: 
\"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.476406 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-config-data\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.484868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnb2r\" (UniqueName: \"kubernetes.io/projected/2d55cdb6-397e-475e-9e07-6a6ab4b4342d-kube-api-access-qnb2r\") pod \"cinder-api-0\" (UID: \"2d55cdb6-397e-475e-9e07-6a6ab4b4342d\") " pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.546800 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.720023 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e75b642c-ef02-47f7-a262-ee9699fd5eb5" path="/var/lib/kubelet/pods/e75b642c-ef02-47f7-a262-ee9699fd5eb5/volumes" Dec 15 08:54:48 crc kubenswrapper[4876]: I1215 08:54:48.994967 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 15 08:54:49 crc kubenswrapper[4876]: W1215 08:54:49.005463 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d55cdb6_397e_475e_9e07_6a6ab4b4342d.slice/crio-a91e24404ab8e99f5e1485073a175c408c11b615b0e619c6c48cfa063e8d16ab WatchSource:0}: Error finding container a91e24404ab8e99f5e1485073a175c408c11b615b0e619c6c48cfa063e8d16ab: Status 404 returned error can't find the container with id a91e24404ab8e99f5e1485073a175c408c11b615b0e619c6c48cfa063e8d16ab Dec 15 08:54:49 crc kubenswrapper[4876]: I1215 08:54:49.104494 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2d55cdb6-397e-475e-9e07-6a6ab4b4342d","Type":"ContainerStarted","Data":"a91e24404ab8e99f5e1485073a175c408c11b615b0e619c6c48cfa063e8d16ab"} Dec 15 08:54:49 crc kubenswrapper[4876]: I1215 08:54:49.953055 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:50 crc kubenswrapper[4876]: I1215 08:54:50.126538 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2d55cdb6-397e-475e-9e07-6a6ab4b4342d","Type":"ContainerStarted","Data":"e91811e65bd8892171813323a779f853ad99cec8b667e163c9d902d5325ead6f"} Dec 15 08:54:50 crc kubenswrapper[4876]: I1215 08:54:50.621698 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Dec 15 08:54:51 crc kubenswrapper[4876]: I1215 08:54:51.137816 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2d55cdb6-397e-475e-9e07-6a6ab4b4342d","Type":"ContainerStarted","Data":"0080ee00580314fc96319c698be15be1619e95d1cb9933532b34362fff2e95c9"} Dec 15 08:54:51 crc kubenswrapper[4876]: I1215 08:54:51.137960 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 15 08:54:51 crc kubenswrapper[4876]: I1215 08:54:51.160960 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.160937163 podStartE2EDuration="3.160937163s" 
podCreationTimestamp="2025-12-15 08:54:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:51.153291617 +0000 UTC m=+7416.724434538" watchObservedRunningTime="2025-12-15 08:54:51.160937163 +0000 UTC m=+7416.732080074" Dec 15 08:54:52 crc kubenswrapper[4876]: I1215 08:54:52.435456 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 15 08:54:52 crc kubenswrapper[4876]: I1215 08:54:52.494125 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:53 crc kubenswrapper[4876]: I1215 08:54:53.156218 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="cinder-scheduler" containerID="cri-o://0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd" gracePeriod=30 Dec 15 08:54:53 crc kubenswrapper[4876]: I1215 08:54:53.156253 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="probe" containerID="cri-o://dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c" gracePeriod=30 Dec 15 08:54:54 crc kubenswrapper[4876]: I1215 08:54:54.167017 4876 generic.go:334] "Generic (PLEG): container finished" podID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerID="dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c" exitCode=0 Dec 15 08:54:54 crc kubenswrapper[4876]: I1215 08:54:54.167085 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerDied","Data":"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c"} Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.163509 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.750556 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.884557 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893259 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln724\" (UniqueName: \"kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893349 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893396 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893469 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893489 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893547 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id\") pod \"3c4d2bac-0413-4aad-86f0-90df9d781218\" (UID: \"3c4d2bac-0413-4aad-86f0-90df9d781218\") " Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.893960 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.901378 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts" (OuterVolumeSpecName: "scripts") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.908280 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.908895 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724" (OuterVolumeSpecName: "kube-api-access-ln724") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "kube-api-access-ln724". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.977136 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.995871 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln724\" (UniqueName: \"kubernetes.io/projected/3c4d2bac-0413-4aad-86f0-90df9d781218-kube-api-access-ln724\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.995909 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.995918 4876 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.995927 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:55 crc kubenswrapper[4876]: I1215 08:54:55.995934 4876 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3c4d2bac-0413-4aad-86f0-90df9d781218-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.008312 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data" (OuterVolumeSpecName: "config-data") pod "3c4d2bac-0413-4aad-86f0-90df9d781218" (UID: "3c4d2bac-0413-4aad-86f0-90df9d781218"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.097284 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c4d2bac-0413-4aad-86f0-90df9d781218-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.186314 4876 generic.go:334] "Generic (PLEG): container finished" podID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerID="0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd" exitCode=0 Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.186367 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerDied","Data":"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd"} Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.186391 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.186798 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3c4d2bac-0413-4aad-86f0-90df9d781218","Type":"ContainerDied","Data":"579292a2bf1b92be7eebddd01d49f6e4d16c850222d119d4de8309e2c637910f"} Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.186804 4876 scope.go:117] "RemoveContainer" containerID="dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.210770 4876 scope.go:117] "RemoveContainer" containerID="0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.232563 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.238990 4876 scope.go:117] "RemoveContainer" containerID="dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c" Dec 15 08:54:56 crc kubenswrapper[4876]: E1215 08:54:56.241699 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c\": container with ID starting with dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c not found: ID does not exist" containerID="dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.241969 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c"} err="failed to get container status \"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c\": rpc error: code = NotFound desc = could not find container \"dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c\": container with ID starting with dd58bfd0d24bb46567bcc2e4c3398df63eed6b2f7350800d5b8fe8059618e67c not found: ID does not exist" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.242120 4876 scope.go:117] "RemoveContainer" containerID="0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd" Dec 15 08:54:56 crc kubenswrapper[4876]: E1215 08:54:56.243398 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd\": container with 
ID starting with 0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd not found: ID does not exist" containerID="0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.243527 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd"} err="failed to get container status \"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd\": rpc error: code = NotFound desc = could not find container \"0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd\": container with ID starting with 0d34f9469731455c1a93c551352b7f876750615ffd33538947eae82e64f2cadd not found: ID does not exist" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.257005 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.272058 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:56 crc kubenswrapper[4876]: E1215 08:54:56.272462 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="probe" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.272481 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="probe" Dec 15 08:54:56 crc kubenswrapper[4876]: E1215 08:54:56.272504 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="cinder-scheduler" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.272510 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="cinder-scheduler" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.272684 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="probe" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.272703 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" containerName="cinder-scheduler" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.274784 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.279174 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.283810 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.403819 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.403909 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.403948 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5j4q\" (UniqueName: \"kubernetes.io/projected/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-kube-api-access-k5j4q\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.404009 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.404152 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.404214 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.505878 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.505967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.506003 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-k5j4q\" (UniqueName: \"kubernetes.io/projected/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-kube-api-access-k5j4q\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.506061 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.506097 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.506141 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.506339 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.511453 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.514583 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.514631 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.514636 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.524282 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5j4q\" (UniqueName: \"kubernetes.io/projected/3b2c2c29-aa94-4339-b6b7-95ee7f48dd73-kube-api-access-k5j4q\") pod \"cinder-scheduler-0\" (UID: \"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73\") " 
pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.601594 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 15 08:54:56 crc kubenswrapper[4876]: I1215 08:54:56.731451 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c4d2bac-0413-4aad-86f0-90df9d781218" path="/var/lib/kubelet/pods/3c4d2bac-0413-4aad-86f0-90df9d781218/volumes" Dec 15 08:54:57 crc kubenswrapper[4876]: I1215 08:54:57.065790 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 15 08:54:57 crc kubenswrapper[4876]: I1215 08:54:57.195183 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73","Type":"ContainerStarted","Data":"406f8b9340955b2bb0b1a0c57194be02fb6fb57fc4702f2c9d94b7c0b6a206aa"} Dec 15 08:54:58 crc kubenswrapper[4876]: I1215 08:54:58.208090 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73","Type":"ContainerStarted","Data":"1949e161a789405d01113ff40ef3747c50f8153e7533efc15ddeb20a6092829a"} Dec 15 08:54:58 crc kubenswrapper[4876]: I1215 08:54:58.208762 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b2c2c29-aa94-4339-b6b7-95ee7f48dd73","Type":"ContainerStarted","Data":"932ce716f6e83bf7d84e1d6eda5f6a7ea051131d565682fc55fa51f18f25376f"} Dec 15 08:54:58 crc kubenswrapper[4876]: I1215 08:54:58.236129 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.236097257 podStartE2EDuration="2.236097257s" podCreationTimestamp="2025-12-15 08:54:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:54:58.229702025 +0000 UTC m=+7423.800844946" watchObservedRunningTime="2025-12-15 08:54:58.236097257 +0000 UTC m=+7423.807240168" Dec 15 08:55:00 crc kubenswrapper[4876]: I1215 08:55:00.456451 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 15 08:55:01 crc kubenswrapper[4876]: I1215 08:55:01.602776 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 15 08:55:06 crc kubenswrapper[4876]: I1215 08:55:06.829068 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.065177 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-br4mg"] Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.073728 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-br4mg"] Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.821125 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.823944 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.830329 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.894300 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.894408 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.894765 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvvvx\" (UniqueName: \"kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.996394 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvvvx\" (UniqueName: \"kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.996793 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.996916 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.997237 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:45 crc kubenswrapper[4876]: I1215 08:55:45.997454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.019500 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-kvvvx\" (UniqueName: \"kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx\") pod \"redhat-operators-9dmgh\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.030482 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-2f87-account-create-update-hlzht"] Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.043490 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-2f87-account-create-update-hlzht"] Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.158877 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.675049 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.718088 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a98bf7d3-593a-47fd-b8ba-70207cc611d4" path="/var/lib/kubelet/pods/a98bf7d3-593a-47fd-b8ba-70207cc611d4/volumes" Dec 15 08:55:46 crc kubenswrapper[4876]: I1215 08:55:46.718868 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c96afc-ef13-43e7-a2c3-93fd949bdfdd" path="/var/lib/kubelet/pods/c0c96afc-ef13-43e7-a2c3-93fd949bdfdd/volumes" Dec 15 08:55:47 crc kubenswrapper[4876]: I1215 08:55:47.639981 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerID="2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e" exitCode=0 Dec 15 08:55:47 crc kubenswrapper[4876]: I1215 08:55:47.640064 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerDied","Data":"2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e"} Dec 15 08:55:47 crc kubenswrapper[4876]: I1215 08:55:47.640291 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerStarted","Data":"1c8303d8f68f838d2ed4f1ef8b0d35b25c9c03049eff9f1027cdcf99ca2a7152"} Dec 15 08:55:49 crc kubenswrapper[4876]: I1215 08:55:49.662377 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerStarted","Data":"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593"} Dec 15 08:55:51 crc kubenswrapper[4876]: I1215 08:55:51.679511 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerID="775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593" exitCode=0 Dec 15 08:55:51 crc kubenswrapper[4876]: I1215 08:55:51.679565 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerDied","Data":"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593"} Dec 15 08:55:52 crc kubenswrapper[4876]: I1215 08:55:52.690421 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" 
event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerStarted","Data":"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110"} Dec 15 08:55:52 crc kubenswrapper[4876]: I1215 08:55:52.720842 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9dmgh" podStartSLOduration=3.137669589 podStartE2EDuration="7.720823265s" podCreationTimestamp="2025-12-15 08:55:45 +0000 UTC" firstStartedPulling="2025-12-15 08:55:47.642352249 +0000 UTC m=+7473.213495170" lastFinishedPulling="2025-12-15 08:55:52.225505935 +0000 UTC m=+7477.796648846" observedRunningTime="2025-12-15 08:55:52.714382723 +0000 UTC m=+7478.285525644" watchObservedRunningTime="2025-12-15 08:55:52.720823265 +0000 UTC m=+7478.291966176" Dec 15 08:55:56 crc kubenswrapper[4876]: I1215 08:55:56.159766 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:56 crc kubenswrapper[4876]: I1215 08:55:56.160432 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:55:57 crc kubenswrapper[4876]: I1215 08:55:57.027567 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-bhn67"] Dec 15 08:55:57 crc kubenswrapper[4876]: I1215 08:55:57.036033 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-bhn67"] Dec 15 08:55:57 crc kubenswrapper[4876]: I1215 08:55:57.202355 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9dmgh" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="registry-server" probeResult="failure" output=< Dec 15 08:55:57 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 08:55:57 crc kubenswrapper[4876]: > Dec 15 08:55:57 crc kubenswrapper[4876]: I1215 08:55:57.322874 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:55:57 crc kubenswrapper[4876]: I1215 08:55:57.322931 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:55:58 crc kubenswrapper[4876]: I1215 08:55:58.716765 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8bccbd9-5502-4352-87e6-dc0b1f35b070" path="/var/lib/kubelet/pods/d8bccbd9-5502-4352-87e6-dc0b1f35b070/volumes" Dec 15 08:56:06 crc kubenswrapper[4876]: I1215 08:56:06.215367 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:56:06 crc kubenswrapper[4876]: I1215 08:56:06.265312 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:56:06 crc kubenswrapper[4876]: I1215 08:56:06.456617 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:56:07 crc kubenswrapper[4876]: I1215 08:56:07.805580 4876 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/redhat-operators-9dmgh" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="registry-server" containerID="cri-o://ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110" gracePeriod=2 Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.244419 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.325080 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities\") pod \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.325238 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvvvx\" (UniqueName: \"kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx\") pod \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.325329 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content\") pod \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\" (UID: \"dc1a93cb-fb34-4718-b742-aa9379d0cc8b\") " Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.326457 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities" (OuterVolumeSpecName: "utilities") pod "dc1a93cb-fb34-4718-b742-aa9379d0cc8b" (UID: "dc1a93cb-fb34-4718-b742-aa9379d0cc8b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.331127 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx" (OuterVolumeSpecName: "kube-api-access-kvvvx") pod "dc1a93cb-fb34-4718-b742-aa9379d0cc8b" (UID: "dc1a93cb-fb34-4718-b742-aa9379d0cc8b"). InnerVolumeSpecName "kube-api-access-kvvvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.428183 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.428219 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvvvx\" (UniqueName: \"kubernetes.io/projected/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-kube-api-access-kvvvx\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.467797 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc1a93cb-fb34-4718-b742-aa9379d0cc8b" (UID: "dc1a93cb-fb34-4718-b742-aa9379d0cc8b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.529788 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc1a93cb-fb34-4718-b742-aa9379d0cc8b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.819181 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerID="ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110" exitCode=0 Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.819237 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerDied","Data":"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110"} Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.819262 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9dmgh" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.819277 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9dmgh" event={"ID":"dc1a93cb-fb34-4718-b742-aa9379d0cc8b","Type":"ContainerDied","Data":"1c8303d8f68f838d2ed4f1ef8b0d35b25c9c03049eff9f1027cdcf99ca2a7152"} Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.819304 4876 scope.go:117] "RemoveContainer" containerID="ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.846569 4876 scope.go:117] "RemoveContainer" containerID="775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.849394 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.864455 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9dmgh"] Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.869458 4876 scope.go:117] "RemoveContainer" containerID="2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.912683 4876 scope.go:117] "RemoveContainer" containerID="ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110" Dec 15 08:56:08 crc kubenswrapper[4876]: E1215 08:56:08.913472 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110\": container with ID starting with ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110 not found: ID does not exist" containerID="ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.913540 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110"} err="failed to get container status \"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110\": rpc error: code = NotFound desc = could not find container \"ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110\": container with ID starting with ccc85d28324ac649ab5902d84509155fb21fe38dd4d1dce1c0f5eb0912273110 not found: ID does not exist" Dec 15 08:56:08 crc 
kubenswrapper[4876]: I1215 08:56:08.913587 4876 scope.go:117] "RemoveContainer" containerID="775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593" Dec 15 08:56:08 crc kubenswrapper[4876]: E1215 08:56:08.914049 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593\": container with ID starting with 775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593 not found: ID does not exist" containerID="775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.914094 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593"} err="failed to get container status \"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593\": rpc error: code = NotFound desc = could not find container \"775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593\": container with ID starting with 775834530eaeab9f6d6d43b5e0632ee0be0ef813de4fd39aafb8d279fd1a9593 not found: ID does not exist" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.914146 4876 scope.go:117] "RemoveContainer" containerID="2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e" Dec 15 08:56:08 crc kubenswrapper[4876]: E1215 08:56:08.914888 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e\": container with ID starting with 2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e not found: ID does not exist" containerID="2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e" Dec 15 08:56:08 crc kubenswrapper[4876]: I1215 08:56:08.914911 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e"} err="failed to get container status \"2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e\": rpc error: code = NotFound desc = could not find container \"2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e\": container with ID starting with 2ea8569888bd43be6cf019cceec70d43c7b6e80ca837b97f3ba422bf4f22480e not found: ID does not exist" Dec 15 08:56:10 crc kubenswrapper[4876]: I1215 08:56:10.719460 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" path="/var/lib/kubelet/pods/dc1a93cb-fb34-4718-b742-aa9379d0cc8b/volumes" Dec 15 08:56:11 crc kubenswrapper[4876]: I1215 08:56:11.035713 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-msbfs"] Dec 15 08:56:11 crc kubenswrapper[4876]: I1215 08:56:11.044390 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-msbfs"] Dec 15 08:56:12 crc kubenswrapper[4876]: I1215 08:56:12.717668 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e02a5401-9efc-48a2-b7fa-a3750f1186fe" path="/var/lib/kubelet/pods/e02a5401-9efc-48a2-b7fa-a3750f1186fe/volumes" Dec 15 08:56:24 crc kubenswrapper[4876]: I1215 08:56:24.908193 4876 scope.go:117] "RemoveContainer" containerID="2bcb602dd33c4efcc5b077ed2d55caec5e0dcea3bbafc32b71404c7cf511ab00" Dec 15 08:56:24 crc kubenswrapper[4876]: I1215 08:56:24.933738 4876 scope.go:117] "RemoveContainer" 
containerID="0d28036ed6036e6481aee8b4bcf6495b6a315357cbfe112ee960f58a8d3f9c44" Dec 15 08:56:24 crc kubenswrapper[4876]: I1215 08:56:24.990715 4876 scope.go:117] "RemoveContainer" containerID="fdbece3d9f8a1801e37653081659817f45f88abae24171f33b80152de8678537" Dec 15 08:56:25 crc kubenswrapper[4876]: I1215 08:56:25.041835 4876 scope.go:117] "RemoveContainer" containerID="b7f3873b611dd902ada85c97f4b85da8179426f4731d48bfea152ec897008786" Dec 15 08:56:27 crc kubenswrapper[4876]: I1215 08:56:27.323031 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:56:27 crc kubenswrapper[4876]: I1215 08:56:27.323752 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.356670 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:56:39 crc kubenswrapper[4876]: E1215 08:56:39.358035 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="extract-content" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.358067 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="extract-content" Dec 15 08:56:39 crc kubenswrapper[4876]: E1215 08:56:39.358090 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="registry-server" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.358099 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="registry-server" Dec 15 08:56:39 crc kubenswrapper[4876]: E1215 08:56:39.358132 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="extract-utilities" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.358141 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="extract-utilities" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.358378 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc1a93cb-fb34-4718-b742-aa9379d0cc8b" containerName="registry-server" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.359648 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.362739 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.362997 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.363049 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.373972 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-jt4dg" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.377337 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.419200 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.419564 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-log" containerID="cri-o://412d1fc7c07db9fe1b461923a490a486ad3773c9a21809ffbc371a1804994683" gracePeriod=30 Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.420181 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-httpd" containerID="cri-o://01e43229f75b3fd7b4153fa581eb61f3e720307a2d0c54cccf8cf327d700d6ef" gracePeriod=30 Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.477253 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.482514 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.494795 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.494847 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.494883 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfqh2\" (UniqueName: \"kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.495143 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.495199 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.503779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.554537 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.554791 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-log" containerID="cri-o://e4c225a9aa22d556b60fb700bada998d4cd233542e748d0d8a6532d8bedaa65e" gracePeriod=30 Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.554952 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-httpd" containerID="cri-o://c6b200ccb13c1fb7fd76d1de54ae7a1f85fa283104d8a492c8871dbb7dceff04" gracePeriod=30 Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.596970 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597035 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597078 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597197 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597373 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597645 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597646 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9d9v\" (UniqueName: \"kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.598298 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.597917 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.598323 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.598414 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfqh2\" (UniqueName: 
\"kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.598503 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.599376 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.604396 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.615942 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfqh2\" (UniqueName: \"kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2\") pod \"horizon-ff77b9795-l9tgr\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.683964 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.700269 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.700332 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.700379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.700427 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9d9v\" (UniqueName: \"kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.700489 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.701117 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.702366 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.703340 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.705156 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.722236 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-s9d9v\" (UniqueName: \"kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v\") pod \"horizon-785c497ff5-glmmb\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:39 crc kubenswrapper[4876]: I1215 08:56:39.814162 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.069968 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.091042 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.092545 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.113985 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.122958 4876 generic.go:334] "Generic (PLEG): container finished" podID="588da42d-4a4a-4a12-80dc-79a76c714258" containerID="e4c225a9aa22d556b60fb700bada998d4cd233542e748d0d8a6532d8bedaa65e" exitCode=143 Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.123039 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerDied","Data":"e4c225a9aa22d556b60fb700bada998d4cd233542e748d0d8a6532d8bedaa65e"} Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.135706 4876 generic.go:334] "Generic (PLEG): container finished" podID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerID="412d1fc7c07db9fe1b461923a490a486ad3773c9a21809ffbc371a1804994683" exitCode=143 Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.135753 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerDied","Data":"412d1fc7c07db9fe1b461923a490a486ad3773c9a21809ffbc371a1804994683"} Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.212013 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.212131 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.212159 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmmtq\" (UniqueName: \"kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.212195 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.212274 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.231335 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.314432 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.314519 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.314541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmmtq\" (UniqueName: \"kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.314571 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.314626 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.315287 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.315912 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.316697 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.321344 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.335339 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmmtq\" (UniqueName: \"kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq\") pod \"horizon-67cc7b585c-xqmvf\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.346730 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:56:40 crc kubenswrapper[4876]: W1215 08:56:40.349610 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f84774b_c535_416f_81cc_d13f396b5ead.slice/crio-c4cfbd6e919c2403a66d112b0503d66ee7deeb75495afefcf2944a1fdf825a6f WatchSource:0}: Error finding container c4cfbd6e919c2403a66d112b0503d66ee7deeb75495afefcf2944a1fdf825a6f: Status 404 returned error can't find the container with id c4cfbd6e919c2403a66d112b0503d66ee7deeb75495afefcf2944a1fdf825a6f Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.440086 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:56:40 crc kubenswrapper[4876]: I1215 08:56:40.934178 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:56:40 crc kubenswrapper[4876]: W1215 08:56:40.945293 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08e91091_a77c_43e3_b8a8_a20b578ad95c.slice/crio-1e77350a88961697bf386abfc4e4581c5dd21d8b426bbd4ee3ecef341586299a WatchSource:0}: Error finding container 1e77350a88961697bf386abfc4e4581c5dd21d8b426bbd4ee3ecef341586299a: Status 404 returned error can't find the container with id 1e77350a88961697bf386abfc4e4581c5dd21d8b426bbd4ee3ecef341586299a Dec 15 08:56:41 crc kubenswrapper[4876]: I1215 08:56:41.146148 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerStarted","Data":"1e77350a88961697bf386abfc4e4581c5dd21d8b426bbd4ee3ecef341586299a"} Dec 15 08:56:41 crc kubenswrapper[4876]: I1215 08:56:41.147279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerStarted","Data":"c4cfbd6e919c2403a66d112b0503d66ee7deeb75495afefcf2944a1fdf825a6f"} Dec 15 08:56:41 crc kubenswrapper[4876]: I1215 08:56:41.148621 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerStarted","Data":"16dd7213d1dc74cabf42d6dbedec0ff83601cb944cf578766b7a22bbb1f10f31"} Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.186125 4876 generic.go:334] "Generic (PLEG): container finished" podID="588da42d-4a4a-4a12-80dc-79a76c714258" containerID="c6b200ccb13c1fb7fd76d1de54ae7a1f85fa283104d8a492c8871dbb7dceff04" exitCode=0 Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.186136 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerDied","Data":"c6b200ccb13c1fb7fd76d1de54ae7a1f85fa283104d8a492c8871dbb7dceff04"} Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.188758 4876 generic.go:334] "Generic (PLEG): container finished" podID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerID="01e43229f75b3fd7b4153fa581eb61f3e720307a2d0c54cccf8cf327d700d6ef" exitCode=0 Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.188803 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerDied","Data":"01e43229f75b3fd7b4153fa581eb61f3e720307a2d0c54cccf8cf327d700d6ef"} Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.188836 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"57c1baf1-a8c2-4d51-957f-506f65ae322d","Type":"ContainerDied","Data":"c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9"} Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.188848 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3ab203d27151bd0323b1ddc07402429f56a7ef7c668a74497554f8ad6c0efe9" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.230598 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.394650 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396368 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396458 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396485 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396526 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396558 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlrl5\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396663 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.396702 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run\") pod \"57c1baf1-a8c2-4d51-957f-506f65ae322d\" (UID: \"57c1baf1-a8c2-4d51-957f-506f65ae322d\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.397733 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs" (OuterVolumeSpecName: "logs") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.398205 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.405984 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph" (OuterVolumeSpecName: "ceph") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.411571 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5" (OuterVolumeSpecName: "kube-api-access-dlrl5") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "kube-api-access-dlrl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.422462 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts" (OuterVolumeSpecName: "scripts") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.463867 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.469398 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data" (OuterVolumeSpecName: "config-data") pod "57c1baf1-a8c2-4d51-957f-506f65ae322d" (UID: "57c1baf1-a8c2-4d51-957f-506f65ae322d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499584 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5llmt\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499634 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499719 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.499993 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.500023 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data\") pod \"588da42d-4a4a-4a12-80dc-79a76c714258\" (UID: \"588da42d-4a4a-4a12-80dc-79a76c714258\") " Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501416 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501828 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501845 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501897 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/57c1baf1-a8c2-4d51-957f-506f65ae322d-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 
08:56:43.501908 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57c1baf1-a8c2-4d51-957f-506f65ae322d-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501920 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.501930 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlrl5\" (UniqueName: \"kubernetes.io/projected/57c1baf1-a8c2-4d51-957f-506f65ae322d-kube-api-access-dlrl5\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.502680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs" (OuterVolumeSpecName: "logs") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.503669 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.504514 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph" (OuterVolumeSpecName: "ceph") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.506741 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts" (OuterVolumeSpecName: "scripts") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.506906 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt" (OuterVolumeSpecName: "kube-api-access-5llmt") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "kube-api-access-5llmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.551354 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.565343 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data" (OuterVolumeSpecName: "config-data") pod "588da42d-4a4a-4a12-80dc-79a76c714258" (UID: "588da42d-4a4a-4a12-80dc-79a76c714258"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605749 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605787 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605797 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5llmt\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-kube-api-access-5llmt\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605807 4876 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605817 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/588da42d-4a4a-4a12-80dc-79a76c714258-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605826 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/588da42d-4a4a-4a12-80dc-79a76c714258-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:43 crc kubenswrapper[4876]: I1215 08:56:43.605842 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/588da42d-4a4a-4a12-80dc-79a76c714258-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.210858 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.216259 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"588da42d-4a4a-4a12-80dc-79a76c714258","Type":"ContainerDied","Data":"b8b202ab4db0c8cca3c4020d98fdfc3e72b2304a39b0238f123900af80d4b1db"} Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.216484 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.217365 4876 scope.go:117] "RemoveContainer" containerID="c6b200ccb13c1fb7fd76d1de54ae7a1f85fa283104d8a492c8871dbb7dceff04" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.266496 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.275489 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.294145 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.305730 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.334175 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: E1215 08:56:44.334877 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.334892 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: E1215 08:56:44.334912 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.334918 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: E1215 08:56:44.334933 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.334940 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: E1215 08:56:44.334960 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.334965 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.335223 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.335238 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.335254 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" containerName="glance-httpd" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.335267 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" containerName="glance-log" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.336313 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.342238 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.347542 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.349625 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.359678 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.359820 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.359902 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tb9rg" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.363844 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.400018 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.420969 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-ceph\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-logs\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421115 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvv2p\" (UniqueName: \"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-kube-api-access-qvv2p\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421146 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421243 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421273 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.421300 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.522617 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-logs\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.522729 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgsk2\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-kube-api-access-hgsk2\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.522787 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.522825 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvv2p\" (UniqueName: \"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-kube-api-access-qvv2p\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.522974 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523055 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-config-data\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523158 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-logs\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 
08:56:44.523330 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523379 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523405 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-logs\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523427 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523483 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.523969 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-scripts\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.524059 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-ceph\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.524086 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-ceph\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.525727 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7067c227-f2bf-4b06-975c-9b66655d1d2c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.528018 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-ceph\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.528185 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.529290 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.530792 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7067c227-f2bf-4b06-975c-9b66655d1d2c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.562982 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvv2p\" (UniqueName: \"kubernetes.io/projected/7067c227-f2bf-4b06-975c-9b66655d1d2c-kube-api-access-qvv2p\") pod \"glance-default-external-api-0\" (UID: \"7067c227-f2bf-4b06-975c-9b66655d1d2c\") " pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.628206 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-scripts\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.628267 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-ceph\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.628317 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgsk2\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-kube-api-access-hgsk2\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.629326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.629754 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.629866 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-logs\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.629941 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.630494 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.630597 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/487e865a-734d-433a-8ecd-8c2839afe8db-logs\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.633148 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-scripts\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.634037 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.642953 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-ceph\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.646589 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487e865a-734d-433a-8ecd-8c2839afe8db-config-data\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.649766 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgsk2\" (UniqueName: \"kubernetes.io/projected/487e865a-734d-433a-8ecd-8c2839afe8db-kube-api-access-hgsk2\") pod \"glance-default-internal-api-0\" (UID: \"487e865a-734d-433a-8ecd-8c2839afe8db\") " pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 
08:56:44.664179 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.716989 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.718830 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57c1baf1-a8c2-4d51-957f-506f65ae322d" path="/var/lib/kubelet/pods/57c1baf1-a8c2-4d51-957f-506f65ae322d/volumes" Dec 15 08:56:44 crc kubenswrapper[4876]: I1215 08:56:44.719991 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="588da42d-4a4a-4a12-80dc-79a76c714258" path="/var/lib/kubelet/pods/588da42d-4a4a-4a12-80dc-79a76c714258/volumes" Dec 15 08:56:51 crc kubenswrapper[4876]: I1215 08:56:51.222008 4876 scope.go:117] "RemoveContainer" containerID="e4c225a9aa22d556b60fb700bada998d4cd233542e748d0d8a6532d8bedaa65e" Dec 15 08:56:51 crc kubenswrapper[4876]: I1215 08:56:51.898282 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 15 08:56:51 crc kubenswrapper[4876]: W1215 08:56:51.906017 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod487e865a_734d_433a_8ecd_8c2839afe8db.slice/crio-35672440d127e26610fa3ff0e4d73e6842bfbf25a14486e245e6da3c5ba3dc3a WatchSource:0}: Error finding container 35672440d127e26610fa3ff0e4d73e6842bfbf25a14486e245e6da3c5ba3dc3a: Status 404 returned error can't find the container with id 35672440d127e26610fa3ff0e4d73e6842bfbf25a14486e245e6da3c5ba3dc3a Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.038733 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.306996 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerStarted","Data":"d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.307045 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerStarted","Data":"cc3c28d8555a01fc6d7756e4c96df0136529ae5b4b53913df508aeb05f05f83d"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.307203 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-785c497ff5-glmmb" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon-log" containerID="cri-o://cc3c28d8555a01fc6d7756e4c96df0136529ae5b4b53913df508aeb05f05f83d" gracePeriod=30 Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.307841 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-785c497ff5-glmmb" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon" containerID="cri-o://d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205" gracePeriod=30 Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.312483 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"487e865a-734d-433a-8ecd-8c2839afe8db","Type":"ContainerStarted","Data":"35672440d127e26610fa3ff0e4d73e6842bfbf25a14486e245e6da3c5ba3dc3a"} Dec 15 08:56:52 crc 
kubenswrapper[4876]: I1215 08:56:52.315495 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerStarted","Data":"7fe0a4fd68238e9bf261cb93dcffc0197a5f2bca17694cc9a598ad308ed2c162"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.315539 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerStarted","Data":"48eeb68c4a0f84e029e01ea083a76e3f10443e2a9ce8c20f712977a2e69eb3bd"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.328456 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-785c497ff5-glmmb" podStartSLOduration=2.305397338 podStartE2EDuration="13.328433223s" podCreationTimestamp="2025-12-15 08:56:39 +0000 UTC" firstStartedPulling="2025-12-15 08:56:40.351226184 +0000 UTC m=+7525.922369095" lastFinishedPulling="2025-12-15 08:56:51.374262069 +0000 UTC m=+7536.945404980" observedRunningTime="2025-12-15 08:56:52.32755784 +0000 UTC m=+7537.898700761" watchObservedRunningTime="2025-12-15 08:56:52.328433223 +0000 UTC m=+7537.899576154" Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.329444 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7067c227-f2bf-4b06-975c-9b66655d1d2c","Type":"ContainerStarted","Data":"c754c664217a6b3bd79e7c63c896fd6d32129c15c4507c20f4f149baf4f8085f"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.336458 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerStarted","Data":"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.336511 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerStarted","Data":"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a"} Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.361562 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-ff77b9795-l9tgr" podStartSLOduration=2.225648379 podStartE2EDuration="13.361540054s" podCreationTimestamp="2025-12-15 08:56:39 +0000 UTC" firstStartedPulling="2025-12-15 08:56:40.238191229 +0000 UTC m=+7525.809334140" lastFinishedPulling="2025-12-15 08:56:51.374082904 +0000 UTC m=+7536.945225815" observedRunningTime="2025-12-15 08:56:52.346266233 +0000 UTC m=+7537.917409174" watchObservedRunningTime="2025-12-15 08:56:52.361540054 +0000 UTC m=+7537.932682965" Dec 15 08:56:52 crc kubenswrapper[4876]: I1215 08:56:52.404467 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-67cc7b585c-xqmvf" podStartSLOduration=1.9414386110000001 podStartE2EDuration="12.404445093s" podCreationTimestamp="2025-12-15 08:56:40 +0000 UTC" firstStartedPulling="2025-12-15 08:56:40.947356847 +0000 UTC m=+7526.518499758" lastFinishedPulling="2025-12-15 08:56:51.410363329 +0000 UTC m=+7536.981506240" observedRunningTime="2025-12-15 08:56:52.375497501 +0000 UTC m=+7537.946640432" watchObservedRunningTime="2025-12-15 08:56:52.404445093 +0000 UTC m=+7537.975588004" Dec 15 08:56:53 crc kubenswrapper[4876]: I1215 08:56:53.349603 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"487e865a-734d-433a-8ecd-8c2839afe8db","Type":"ContainerStarted","Data":"bf41ba50dbe06375f9c03dbd6ff10bbbeb58d69447c798893d7f2b34d323e547"} Dec 15 08:56:53 crc kubenswrapper[4876]: I1215 08:56:53.350155 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"487e865a-734d-433a-8ecd-8c2839afe8db","Type":"ContainerStarted","Data":"e3066ad94cf024a1b4e1a15d377d835bc7e8594b16933a83d201bf46560539ba"} Dec 15 08:56:53 crc kubenswrapper[4876]: I1215 08:56:53.358685 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7067c227-f2bf-4b06-975c-9b66655d1d2c","Type":"ContainerStarted","Data":"d7844ff22d7f9a760cca59f6a83f26e1e3725acac3ffe94afb2fb01836319b77"} Dec 15 08:56:53 crc kubenswrapper[4876]: I1215 08:56:53.382245 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.382227298 podStartE2EDuration="9.382227298s" podCreationTimestamp="2025-12-15 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:56:53.374991408 +0000 UTC m=+7538.946134319" watchObservedRunningTime="2025-12-15 08:56:53.382227298 +0000 UTC m=+7538.953370209" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.370887 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7067c227-f2bf-4b06-975c-9b66655d1d2c","Type":"ContainerStarted","Data":"a400ae8104265b79b72e4a8d0acb164b6cb75ea8bd550538032c4117b1a4754d"} Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.401967 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.401944937 podStartE2EDuration="10.401944937s" podCreationTimestamp="2025-12-15 08:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:56:54.393876535 +0000 UTC m=+7539.965019446" watchObservedRunningTime="2025-12-15 08:56:54.401944937 +0000 UTC m=+7539.973087848" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.665123 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.665405 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.695774 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.735048 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.735296 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.735410 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.750870 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Dec 15 08:56:54 crc kubenswrapper[4876]: I1215 08:56:54.783902 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:55 crc kubenswrapper[4876]: I1215 08:56:55.378283 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:55 crc kubenswrapper[4876]: I1215 08:56:55.378531 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 15 08:56:55 crc kubenswrapper[4876]: I1215 08:56:55.378556 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 08:56:55 crc kubenswrapper[4876]: I1215 08:56:55.378570 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 15 08:56:57 crc kubenswrapper[4876]: I1215 08:56:57.322490 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:56:57 crc kubenswrapper[4876]: I1215 08:56:57.322815 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:56:57 crc kubenswrapper[4876]: I1215 08:56:57.322864 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 08:56:57 crc kubenswrapper[4876]: I1215 08:56:57.323621 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 08:56:57 crc kubenswrapper[4876]: I1215 08:56:57.323692 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285" gracePeriod=600 Dec 15 08:56:58 crc kubenswrapper[4876]: I1215 08:56:58.421747 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285" exitCode=0 Dec 15 08:56:58 crc kubenswrapper[4876]: I1215 08:56:58.422190 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285"} Dec 15 08:56:58 crc kubenswrapper[4876]: I1215 08:56:58.422252 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3"} Dec 15 08:56:58 crc kubenswrapper[4876]: I1215 08:56:58.422273 4876 scope.go:117] "RemoveContainer" containerID="10df69224009d6d1f42c4c8fbc2191b6625ea9f3edf565d6c74321a40d0ff2e0" Dec 15 08:56:59 crc kubenswrapper[4876]: I1215 08:56:59.684324 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:59 crc kubenswrapper[4876]: I1215 08:56:59.684635 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:56:59 crc kubenswrapper[4876]: I1215 08:56:59.814852 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:57:00 crc kubenswrapper[4876]: I1215 08:57:00.441087 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:57:00 crc kubenswrapper[4876]: I1215 08:57:00.441520 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:57:09 crc kubenswrapper[4876]: I1215 08:57:09.686184 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.103:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.103:8080: connect: connection refused" Dec 15 08:57:10 crc kubenswrapper[4876]: I1215 08:57:10.443771 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.105:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.105:8080: connect: connection refused" Dec 15 08:57:16 crc kubenswrapper[4876]: I1215 08:57:16.677994 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 08:57:16 crc kubenswrapper[4876]: I1215 08:57:16.694145 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 15 08:57:16 crc kubenswrapper[4876]: I1215 08:57:16.859903 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 08:57:16 crc kubenswrapper[4876]: I1215 08:57:16.937634 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 15 08:57:21 crc kubenswrapper[4876]: I1215 08:57:21.984791 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 08:57:22.498080 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:57:22 crc kubenswrapper[4876]: E1215 08:57:22.653516 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f84774b_c535_416f_81cc_d13f396b5ead.slice/crio-conmon-d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205.scope\": RecentStats: unable to find data in memory cache]" Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 
08:57:22.661662 4876 generic.go:334] "Generic (PLEG): container finished" podID="6f84774b-c535-416f-81cc-d13f396b5ead" containerID="d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205" exitCode=137 Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 08:57:22.661692 4876 generic.go:334] "Generic (PLEG): container finished" podID="6f84774b-c535-416f-81cc-d13f396b5ead" containerID="cc3c28d8555a01fc6d7756e4c96df0136529ae5b4b53913df508aeb05f05f83d" exitCode=137 Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 08:57:22.661719 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerDied","Data":"d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205"} Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 08:57:22.661754 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerDied","Data":"cc3c28d8555a01fc6d7756e4c96df0136529ae5b4b53913df508aeb05f05f83d"} Dec 15 08:57:22 crc kubenswrapper[4876]: I1215 08:57:22.895468 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.039710 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key\") pod \"6f84774b-c535-416f-81cc-d13f396b5ead\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.039885 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9d9v\" (UniqueName: \"kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v\") pod \"6f84774b-c535-416f-81cc-d13f396b5ead\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.039950 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts\") pod \"6f84774b-c535-416f-81cc-d13f396b5ead\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.040012 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data\") pod \"6f84774b-c535-416f-81cc-d13f396b5ead\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.040094 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs\") pod \"6f84774b-c535-416f-81cc-d13f396b5ead\" (UID: \"6f84774b-c535-416f-81cc-d13f396b5ead\") " Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.040867 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs" (OuterVolumeSpecName: "logs") pod "6f84774b-c535-416f-81cc-d13f396b5ead" (UID: "6f84774b-c535-416f-81cc-d13f396b5ead"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.046236 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "6f84774b-c535-416f-81cc-d13f396b5ead" (UID: "6f84774b-c535-416f-81cc-d13f396b5ead"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.046816 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v" (OuterVolumeSpecName: "kube-api-access-s9d9v") pod "6f84774b-c535-416f-81cc-d13f396b5ead" (UID: "6f84774b-c535-416f-81cc-d13f396b5ead"). InnerVolumeSpecName "kube-api-access-s9d9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.068415 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data" (OuterVolumeSpecName: "config-data") pod "6f84774b-c535-416f-81cc-d13f396b5ead" (UID: "6f84774b-c535-416f-81cc-d13f396b5ead"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.068643 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts" (OuterVolumeSpecName: "scripts") pod "6f84774b-c535-416f-81cc-d13f396b5ead" (UID: "6f84774b-c535-416f-81cc-d13f396b5ead"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.142338 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f84774b-c535-416f-81cc-d13f396b5ead-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.142379 4876 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6f84774b-c535-416f-81cc-d13f396b5ead-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.142392 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9d9v\" (UniqueName: \"kubernetes.io/projected/6f84774b-c535-416f-81cc-d13f396b5ead-kube-api-access-s9d9v\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.142401 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.142411 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6f84774b-c535-416f-81cc-d13f396b5ead-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.671577 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-785c497ff5-glmmb" event={"ID":"6f84774b-c535-416f-81cc-d13f396b5ead","Type":"ContainerDied","Data":"c4cfbd6e919c2403a66d112b0503d66ee7deeb75495afefcf2944a1fdf825a6f"} Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.671628 4876 scope.go:117] "RemoveContainer" 
containerID="d363d62caffa8737c218578d5c45f08a9030bd1ea12b08a3c073aa191eb92205" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.671757 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-785c497ff5-glmmb" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.709611 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.719706 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-785c497ff5-glmmb"] Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.784120 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:57:23 crc kubenswrapper[4876]: I1215 08:57:23.885888 4876 scope.go:117] "RemoveContainer" containerID="cc3c28d8555a01fc6d7756e4c96df0136529ae5b4b53913df508aeb05f05f83d" Dec 15 08:57:24 crc kubenswrapper[4876]: I1215 08:57:24.267028 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:57:24 crc kubenswrapper[4876]: I1215 08:57:24.326770 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:57:24 crc kubenswrapper[4876]: I1215 08:57:24.682211 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon-log" containerID="cri-o://48eeb68c4a0f84e029e01ea083a76e3f10443e2a9ce8c20f712977a2e69eb3bd" gracePeriod=30 Dec 15 08:57:24 crc kubenswrapper[4876]: I1215 08:57:24.682272 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" containerID="cri-o://7fe0a4fd68238e9bf261cb93dcffc0197a5f2bca17694cc9a598ad308ed2c162" gracePeriod=30 Dec 15 08:57:24 crc kubenswrapper[4876]: I1215 08:57:24.717194 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" path="/var/lib/kubelet/pods/6f84774b-c535-416f-81cc-d13f396b5ead/volumes" Dec 15 08:57:25 crc kubenswrapper[4876]: I1215 08:57:25.175908 4876 scope.go:117] "RemoveContainer" containerID="412d1fc7c07db9fe1b461923a490a486ad3773c9a21809ffbc371a1804994683" Dec 15 08:57:25 crc kubenswrapper[4876]: I1215 08:57:25.197692 4876 scope.go:117] "RemoveContainer" containerID="01e43229f75b3fd7b4153fa581eb61f3e720307a2d0c54cccf8cf327d700d6ef" Dec 15 08:57:28 crc kubenswrapper[4876]: I1215 08:57:28.715706 4876 generic.go:334] "Generic (PLEG): container finished" podID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerID="7fe0a4fd68238e9bf261cb93dcffc0197a5f2bca17694cc9a598ad308ed2c162" exitCode=0 Dec 15 08:57:28 crc kubenswrapper[4876]: I1215 08:57:28.717667 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerDied","Data":"7fe0a4fd68238e9bf261cb93dcffc0197a5f2bca17694cc9a598ad308ed2c162"} Dec 15 08:57:29 crc kubenswrapper[4876]: I1215 08:57:29.684800 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.103:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.103:8080: connect: connection refused" Dec 15 
08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.539940 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-78569d8b45-bcs6r"] Dec 15 08:57:31 crc kubenswrapper[4876]: E1215 08:57:31.540392 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.540410 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon" Dec 15 08:57:31 crc kubenswrapper[4876]: E1215 08:57:31.540440 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon-log" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.540447 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon-log" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.540664 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.540690 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f84774b-c535-416f-81cc-d13f396b5ead" containerName="horizon-log" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.541732 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.565473 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78569d8b45-bcs6r"] Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.608695 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-config-data\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.609118 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0067ba26-263c-4df8-8249-74e9b72509ee-horizon-secret-key\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.609144 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-scripts\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.609163 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0067ba26-263c-4df8-8249-74e9b72509ee-logs\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.609184 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48m99\" (UniqueName: \"kubernetes.io/projected/0067ba26-263c-4df8-8249-74e9b72509ee-kube-api-access-48m99\") pod \"horizon-78569d8b45-bcs6r\" (UID: 
\"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.711136 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-config-data\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.711265 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0067ba26-263c-4df8-8249-74e9b72509ee-horizon-secret-key\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.711286 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-scripts\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.711306 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0067ba26-263c-4df8-8249-74e9b72509ee-logs\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.711326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48m99\" (UniqueName: \"kubernetes.io/projected/0067ba26-263c-4df8-8249-74e9b72509ee-kube-api-access-48m99\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.712031 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-scripts\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.712124 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0067ba26-263c-4df8-8249-74e9b72509ee-logs\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.712814 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0067ba26-263c-4df8-8249-74e9b72509ee-config-data\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.717406 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0067ba26-263c-4df8-8249-74e9b72509ee-horizon-secret-key\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.729629 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-48m99\" (UniqueName: \"kubernetes.io/projected/0067ba26-263c-4df8-8249-74e9b72509ee-kube-api-access-48m99\") pod \"horizon-78569d8b45-bcs6r\" (UID: \"0067ba26-263c-4df8-8249-74e9b72509ee\") " pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:31 crc kubenswrapper[4876]: I1215 08:57:31.866208 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.305968 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78569d8b45-bcs6r"] Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.521821 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-xnvd2"] Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.524672 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.531622 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-xnvd2"] Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.625601 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-7e75-account-create-update-zr4ks"] Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.627212 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.629040 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.629252 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dql7g\" (UniqueName: \"kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.629507 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.651315 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-7e75-account-create-update-zr4ks"] Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.731926 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dql7g\" (UniqueName: \"kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.731996 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt8tc\" (UniqueName: \"kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.732064 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.732159 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.733418 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.751936 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dql7g\" (UniqueName: \"kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g\") pod \"heat-db-create-xnvd2\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.768301 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78569d8b45-bcs6r" event={"ID":"0067ba26-263c-4df8-8249-74e9b72509ee","Type":"ContainerStarted","Data":"3a208e4b78749b8886e30ce41bdf7674e2b8cecd76f5081a3bc5c1dafac951c0"} Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.768355 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78569d8b45-bcs6r" event={"ID":"0067ba26-263c-4df8-8249-74e9b72509ee","Type":"ContainerStarted","Data":"573a50842ccb10fbf10bf37c403a9bce2845b17cb4ee44336bd7df673a17ba76"} Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.768368 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78569d8b45-bcs6r" event={"ID":"0067ba26-263c-4df8-8249-74e9b72509ee","Type":"ContainerStarted","Data":"821c1b4bab24dca074a25ea0f123fe17ea36e33cd450728678475cb7e1f8c4c6"} Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.793730 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-78569d8b45-bcs6r" podStartSLOduration=1.793707801 podStartE2EDuration="1.793707801s" podCreationTimestamp="2025-12-15 08:57:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:57:32.786710127 +0000 UTC m=+7578.357853048" watchObservedRunningTime="2025-12-15 08:57:32.793707801 +0000 UTC m=+7578.364850722" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.833674 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt8tc\" (UniqueName: \"kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.836027 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.837193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.847335 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.852283 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt8tc\" (UniqueName: \"kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc\") pod \"heat-7e75-account-create-update-zr4ks\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:32 crc kubenswrapper[4876]: I1215 08:57:32.944553 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.400556 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-xnvd2"] Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.545638 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-7e75-account-create-update-zr4ks"] Dec 15 08:57:33 crc kubenswrapper[4876]: W1215 08:57:33.549535 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4497a99a_b1f8_4e33_96fa_0e5b8462f1ec.slice/crio-7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95 WatchSource:0}: Error finding container 7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95: Status 404 returned error can't find the container with id 7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95 Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.780241 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-xnvd2" event={"ID":"478cc8c2-0e45-4db3-aba9-4a4c7de64b60","Type":"ContainerStarted","Data":"6369db67ff0505b20006eb6003a3ea728bbd5ce514b5dcc9a6eed99da114916f"} Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.780701 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-xnvd2" event={"ID":"478cc8c2-0e45-4db3-aba9-4a4c7de64b60","Type":"ContainerStarted","Data":"13ab1d863b4b3f530fd0ae92517b1afdb02aaa3cc41d8d7c7116e34d27c5a14d"} Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.785246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7e75-account-create-update-zr4ks" event={"ID":"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec","Type":"ContainerStarted","Data":"ddf28cdd9db2efa4271303729b6015563fb33d3fd2db404dd3ea9c717ec039d0"} Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.785333 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7e75-account-create-update-zr4ks" event={"ID":"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec","Type":"ContainerStarted","Data":"7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95"} Dec 
15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.802824 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-create-xnvd2" podStartSLOduration=1.8028018399999999 podStartE2EDuration="1.80280184s" podCreationTimestamp="2025-12-15 08:57:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:57:33.796552036 +0000 UTC m=+7579.367694947" watchObservedRunningTime="2025-12-15 08:57:33.80280184 +0000 UTC m=+7579.373944751" Dec 15 08:57:33 crc kubenswrapper[4876]: I1215 08:57:33.822425 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-7e75-account-create-update-zr4ks" podStartSLOduration=1.822402405 podStartE2EDuration="1.822402405s" podCreationTimestamp="2025-12-15 08:57:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:57:33.813179673 +0000 UTC m=+7579.384322594" watchObservedRunningTime="2025-12-15 08:57:33.822402405 +0000 UTC m=+7579.393545316" Dec 15 08:57:34 crc kubenswrapper[4876]: I1215 08:57:34.796214 4876 generic.go:334] "Generic (PLEG): container finished" podID="4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" containerID="ddf28cdd9db2efa4271303729b6015563fb33d3fd2db404dd3ea9c717ec039d0" exitCode=0 Dec 15 08:57:34 crc kubenswrapper[4876]: I1215 08:57:34.796251 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7e75-account-create-update-zr4ks" event={"ID":"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec","Type":"ContainerDied","Data":"ddf28cdd9db2efa4271303729b6015563fb33d3fd2db404dd3ea9c717ec039d0"} Dec 15 08:57:34 crc kubenswrapper[4876]: I1215 08:57:34.797936 4876 generic.go:334] "Generic (PLEG): container finished" podID="478cc8c2-0e45-4db3-aba9-4a4c7de64b60" containerID="6369db67ff0505b20006eb6003a3ea728bbd5ce514b5dcc9a6eed99da114916f" exitCode=0 Dec 15 08:57:34 crc kubenswrapper[4876]: I1215 08:57:34.797966 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-xnvd2" event={"ID":"478cc8c2-0e45-4db3-aba9-4a4c7de64b60","Type":"ContainerDied","Data":"6369db67ff0505b20006eb6003a3ea728bbd5ce514b5dcc9a6eed99da114916f"} Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.300255 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.307598 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.425485 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt8tc\" (UniqueName: \"kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc\") pod \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.425669 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts\") pod \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.425708 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts\") pod \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\" (UID: \"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec\") " Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.425876 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dql7g\" (UniqueName: \"kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g\") pod \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\" (UID: \"478cc8c2-0e45-4db3-aba9-4a4c7de64b60\") " Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.426261 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" (UID: "4497a99a-b1f8-4e33-96fa-0e5b8462f1ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.426291 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "478cc8c2-0e45-4db3-aba9-4a4c7de64b60" (UID: "478cc8c2-0e45-4db3-aba9-4a4c7de64b60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.433289 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc" (OuterVolumeSpecName: "kube-api-access-gt8tc") pod "4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" (UID: "4497a99a-b1f8-4e33-96fa-0e5b8462f1ec"). InnerVolumeSpecName "kube-api-access-gt8tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.433349 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g" (OuterVolumeSpecName: "kube-api-access-dql7g") pod "478cc8c2-0e45-4db3-aba9-4a4c7de64b60" (UID: "478cc8c2-0e45-4db3-aba9-4a4c7de64b60"). InnerVolumeSpecName "kube-api-access-dql7g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.527943 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dql7g\" (UniqueName: \"kubernetes.io/projected/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-kube-api-access-dql7g\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.527978 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt8tc\" (UniqueName: \"kubernetes.io/projected/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-kube-api-access-gt8tc\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.527988 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/478cc8c2-0e45-4db3-aba9-4a4c7de64b60-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.527996 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.817611 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-xnvd2" event={"ID":"478cc8c2-0e45-4db3-aba9-4a4c7de64b60","Type":"ContainerDied","Data":"13ab1d863b4b3f530fd0ae92517b1afdb02aaa3cc41d8d7c7116e34d27c5a14d"} Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.817678 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13ab1d863b4b3f530fd0ae92517b1afdb02aaa3cc41d8d7c7116e34d27c5a14d" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.817639 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-xnvd2" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.819191 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-7e75-account-create-update-zr4ks" event={"ID":"4497a99a-b1f8-4e33-96fa-0e5b8462f1ec","Type":"ContainerDied","Data":"7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95"} Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.819228 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bac4eeebc15fde3a7471c470169cec72ec5c999ad4c26c4e204a27e6dc8ac95" Dec 15 08:57:36 crc kubenswrapper[4876]: I1215 08:57:36.819344 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-7e75-account-create-update-zr4ks" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.733906 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-mv94m"] Dec 15 08:57:37 crc kubenswrapper[4876]: E1215 08:57:37.734950 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" containerName="mariadb-account-create-update" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.734973 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" containerName="mariadb-account-create-update" Dec 15 08:57:37 crc kubenswrapper[4876]: E1215 08:57:37.735014 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="478cc8c2-0e45-4db3-aba9-4a4c7de64b60" containerName="mariadb-database-create" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.735026 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="478cc8c2-0e45-4db3-aba9-4a4c7de64b60" containerName="mariadb-database-create" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.735359 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="478cc8c2-0e45-4db3-aba9-4a4c7de64b60" containerName="mariadb-database-create" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.735386 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" containerName="mariadb-account-create-update" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.736415 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.738944 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.739155 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-6b7mf" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.771876 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-mv94m"] Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.862978 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.863457 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.863510 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh525\" (UniqueName: \"kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.965143 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.965194 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh525\" (UniqueName: \"kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.965252 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.970914 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.978994 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:37 crc kubenswrapper[4876]: I1215 08:57:37.986563 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh525\" (UniqueName: \"kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525\") pod \"heat-db-sync-mv94m\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:38 crc kubenswrapper[4876]: I1215 08:57:38.059432 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:38 crc kubenswrapper[4876]: I1215 08:57:38.512894 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-mv94m"] Dec 15 08:57:38 crc kubenswrapper[4876]: W1215 08:57:38.517160 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b5723b0_8528_4595_a010_8a861e73e8a6.slice/crio-0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d WatchSource:0}: Error finding container 0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d: Status 404 returned error can't find the container with id 0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d Dec 15 08:57:38 crc kubenswrapper[4876]: I1215 08:57:38.839759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mv94m" event={"ID":"9b5723b0-8528-4595-a010-8a861e73e8a6","Type":"ContainerStarted","Data":"0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d"} Dec 15 08:57:39 crc kubenswrapper[4876]: I1215 08:57:39.684528 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.103:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.103:8080: connect: connection refused" Dec 15 08:57:41 crc kubenswrapper[4876]: I1215 08:57:41.866833 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:41 crc kubenswrapper[4876]: I1215 08:57:41.867057 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:57:46 crc kubenswrapper[4876]: I1215 08:57:46.929960 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mv94m" event={"ID":"9b5723b0-8528-4595-a010-8a861e73e8a6","Type":"ContainerStarted","Data":"718f7e3398908350747e65abc793f951233f7950528bcfd948460bd4b39d1d66"} Dec 15 08:57:46 crc kubenswrapper[4876]: I1215 08:57:46.947386 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-mv94m" podStartSLOduration=2.07495291 podStartE2EDuration="9.947361761s" podCreationTimestamp="2025-12-15 08:57:37 +0000 UTC" firstStartedPulling="2025-12-15 08:57:38.521619932 +0000 UTC m=+7584.092762843" lastFinishedPulling="2025-12-15 08:57:46.394028783 +0000 UTC m=+7591.965171694" observedRunningTime="2025-12-15 08:57:46.943667185 +0000 UTC m=+7592.514810096" watchObservedRunningTime="2025-12-15 08:57:46.947361761 +0000 UTC m=+7592.518504682" Dec 15 08:57:48 crc kubenswrapper[4876]: I1215 08:57:48.958770 4876 generic.go:334] "Generic (PLEG): container finished" podID="9b5723b0-8528-4595-a010-8a861e73e8a6" containerID="718f7e3398908350747e65abc793f951233f7950528bcfd948460bd4b39d1d66" exitCode=0 Dec 15 08:57:48 crc kubenswrapper[4876]: I1215 08:57:48.958828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mv94m" event={"ID":"9b5723b0-8528-4595-a010-8a861e73e8a6","Type":"ContainerDied","Data":"718f7e3398908350747e65abc793f951233f7950528bcfd948460bd4b39d1d66"} Dec 15 08:57:49 crc kubenswrapper[4876]: I1215 08:57:49.685358 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-ff77b9795-l9tgr" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" probeResult="failure" output="Get 
\"http://10.217.1.103:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.103:8080: connect: connection refused" Dec 15 08:57:49 crc kubenswrapper[4876]: I1215 08:57:49.685734 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.330548 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.519603 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data\") pod \"9b5723b0-8528-4595-a010-8a861e73e8a6\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.519710 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle\") pod \"9b5723b0-8528-4595-a010-8a861e73e8a6\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.519744 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh525\" (UniqueName: \"kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525\") pod \"9b5723b0-8528-4595-a010-8a861e73e8a6\" (UID: \"9b5723b0-8528-4595-a010-8a861e73e8a6\") " Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.524386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525" (OuterVolumeSpecName: "kube-api-access-lh525") pod "9b5723b0-8528-4595-a010-8a861e73e8a6" (UID: "9b5723b0-8528-4595-a010-8a861e73e8a6"). InnerVolumeSpecName "kube-api-access-lh525". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.544578 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b5723b0-8528-4595-a010-8a861e73e8a6" (UID: "9b5723b0-8528-4595-a010-8a861e73e8a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.581286 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data" (OuterVolumeSpecName: "config-data") pod "9b5723b0-8528-4595-a010-8a861e73e8a6" (UID: "9b5723b0-8528-4595-a010-8a861e73e8a6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.621808 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.621846 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b5723b0-8528-4595-a010-8a861e73e8a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.621857 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh525\" (UniqueName: \"kubernetes.io/projected/9b5723b0-8528-4595-a010-8a861e73e8a6-kube-api-access-lh525\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.977015 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mv94m" event={"ID":"9b5723b0-8528-4595-a010-8a861e73e8a6","Type":"ContainerDied","Data":"0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d"} Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.977060 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f504e3b2177d7a31def795fa7174cc31abe9d5fa21e461548815aad7a6a5f6d" Dec 15 08:57:50 crc kubenswrapper[4876]: I1215 08:57:50.977148 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mv94m" Dec 15 08:57:51 crc kubenswrapper[4876]: I1215 08:57:51.868897 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-78569d8b45-bcs6r" podUID="0067ba26-263c-4df8-8249-74e9b72509ee" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.108:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.108:8080: connect: connection refused" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.070534 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-75495b79b7-z2zxg"] Dec 15 08:57:52 crc kubenswrapper[4876]: E1215 08:57:52.071009 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b5723b0-8528-4595-a010-8a861e73e8a6" containerName="heat-db-sync" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.071028 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b5723b0-8528-4595-a010-8a861e73e8a6" containerName="heat-db-sync" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.071236 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b5723b0-8528-4595-a010-8a861e73e8a6" containerName="heat-db-sync" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.071886 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.076975 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-6b7mf" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.077197 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.077365 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.097226 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-75495b79b7-z2zxg"] Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.171012 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5f59b99c48-hfp75"] Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.172668 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.174753 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.193561 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5f59b99c48-hfp75"] Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.233461 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-6dd84dfc-l68t4"] Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.235682 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.238098 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.241947 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6dd84dfc-l68t4"] Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.258282 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.258599 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data-custom\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.258761 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.258882 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-combined-ca-bundle\") pod 
\"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.258991 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-combined-ca-bundle\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259090 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259258 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data-custom\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259386 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-combined-ca-bundle\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259493 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcxsk\" (UniqueName: \"kubernetes.io/projected/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-kube-api-access-tcxsk\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259729 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dpl5\" (UniqueName: \"kubernetes.io/projected/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-kube-api-access-9dpl5\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259845 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79hzh\" (UniqueName: \"kubernetes.io/projected/e02b5464-466b-4b2c-8b17-6f95eb820c4c-kube-api-access-79hzh\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.259975 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data-custom\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362420 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data-custom\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362462 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362513 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-combined-ca-bundle\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362552 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-combined-ca-bundle\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362571 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362605 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data-custom\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362648 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-combined-ca-bundle\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362668 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcxsk\" (UniqueName: \"kubernetes.io/projected/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-kube-api-access-tcxsk\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362714 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dpl5\" (UniqueName: \"kubernetes.io/projected/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-kube-api-access-9dpl5\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362730 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79hzh\" 
(UniqueName: \"kubernetes.io/projected/e02b5464-466b-4b2c-8b17-6f95eb820c4c-kube-api-access-79hzh\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362763 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data-custom\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.362807 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.370063 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-combined-ca-bundle\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.371152 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-combined-ca-bundle\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.375955 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-combined-ca-bundle\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.376515 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data-custom\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.376573 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.378883 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data-custom\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.380212 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e02b5464-466b-4b2c-8b17-6f95eb820c4c-config-data\") pod \"heat-api-5f59b99c48-hfp75\" (UID: 
\"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.380844 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-config-data\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.386884 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcxsk\" (UniqueName: \"kubernetes.io/projected/4fdccad6-fd2e-4b94-a2ad-6292ccfb2312-kube-api-access-tcxsk\") pod \"heat-cfnapi-6dd84dfc-l68t4\" (UID: \"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312\") " pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.387042 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79hzh\" (UniqueName: \"kubernetes.io/projected/e02b5464-466b-4b2c-8b17-6f95eb820c4c-kube-api-access-79hzh\") pod \"heat-api-5f59b99c48-hfp75\" (UID: \"e02b5464-466b-4b2c-8b17-6f95eb820c4c\") " pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.389360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dpl5\" (UniqueName: \"kubernetes.io/projected/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-kube-api-access-9dpl5\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.393023 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/baa6ee5f-7b6e-479f-882a-68ed0f9c1a23-config-data-custom\") pod \"heat-engine-75495b79b7-z2zxg\" (UID: \"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23\") " pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.402775 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.492990 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.558275 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:52 crc kubenswrapper[4876]: I1215 08:57:52.933985 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-75495b79b7-z2zxg"] Dec 15 08:57:53 crc kubenswrapper[4876]: I1215 08:57:53.008759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-75495b79b7-z2zxg" event={"ID":"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23","Type":"ContainerStarted","Data":"1b0b8332df7c00b99105e38305bbb1cd59bedf3586a96e8a05a5a093ef0bc75b"} Dec 15 08:57:53 crc kubenswrapper[4876]: I1215 08:57:53.095965 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5f59b99c48-hfp75"] Dec 15 08:57:53 crc kubenswrapper[4876]: I1215 08:57:53.152533 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-6dd84dfc-l68t4"] Dec 15 08:57:53 crc kubenswrapper[4876]: W1215 08:57:53.153223 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fdccad6_fd2e_4b94_a2ad_6292ccfb2312.slice/crio-a08973990ded8526df4d863f9494dc884a68bedf260bc080ebccccfa50e69248 WatchSource:0}: Error finding container a08973990ded8526df4d863f9494dc884a68bedf260bc080ebccccfa50e69248: Status 404 returned error can't find the container with id a08973990ded8526df4d863f9494dc884a68bedf260bc080ebccccfa50e69248 Dec 15 08:57:54 crc kubenswrapper[4876]: I1215 08:57:54.024618 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" event={"ID":"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312","Type":"ContainerStarted","Data":"a08973990ded8526df4d863f9494dc884a68bedf260bc080ebccccfa50e69248"} Dec 15 08:57:54 crc kubenswrapper[4876]: I1215 08:57:54.026051 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5f59b99c48-hfp75" event={"ID":"e02b5464-466b-4b2c-8b17-6f95eb820c4c","Type":"ContainerStarted","Data":"f46c8842364bbc1c088a8aa27901a42a244ed724d513a59fa46a4e26a6531307"} Dec 15 08:57:54 crc kubenswrapper[4876]: I1215 08:57:54.027993 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-75495b79b7-z2zxg" event={"ID":"baa6ee5f-7b6e-479f-882a-68ed0f9c1a23","Type":"ContainerStarted","Data":"95fef7aaf429e0615a6990f34ac355878a493b959151ad8eeb400fb142e95359"} Dec 15 08:57:54 crc kubenswrapper[4876]: I1215 08:57:54.029223 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:57:54 crc kubenswrapper[4876]: I1215 08:57:54.046301 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-75495b79b7-z2zxg" podStartSLOduration=3.046278864 podStartE2EDuration="3.046278864s" podCreationTimestamp="2025-12-15 08:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:57:54.042181227 +0000 UTC m=+7599.613324158" watchObservedRunningTime="2025-12-15 08:57:54.046278864 +0000 UTC m=+7599.617421775" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.043954 4876 generic.go:334] "Generic (PLEG): container finished" podID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerID="48eeb68c4a0f84e029e01ea083a76e3f10443e2a9ce8c20f712977a2e69eb3bd" exitCode=137 Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.044047 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" 
event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerDied","Data":"48eeb68c4a0f84e029e01ea083a76e3f10443e2a9ce8c20f712977a2e69eb3bd"} Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.168589 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.316994 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data\") pod \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.317116 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts\") pod \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.317152 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfqh2\" (UniqueName: \"kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2\") pod \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.317194 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs\") pod \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.317323 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key\") pod \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\" (UID: \"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad\") " Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.317681 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs" (OuterVolumeSpecName: "logs") pod "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" (UID: "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.318058 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.325762 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" (UID: "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.330367 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2" (OuterVolumeSpecName: "kube-api-access-dfqh2") pod "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" (UID: "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad"). 
InnerVolumeSpecName "kube-api-access-dfqh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.353983 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts" (OuterVolumeSpecName: "scripts") pod "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" (UID: "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.357504 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data" (OuterVolumeSpecName: "config-data") pod "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" (UID: "9824b1b0-4e68-4c49-98b8-69cfc57ed8ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.420531 4876 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.420573 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.420583 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:55 crc kubenswrapper[4876]: I1215 08:57:55.420593 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfqh2\" (UniqueName: \"kubernetes.io/projected/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad-kube-api-access-dfqh2\") on node \"crc\" DevicePath \"\"" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.055283 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" event={"ID":"4fdccad6-fd2e-4b94-a2ad-6292ccfb2312","Type":"ContainerStarted","Data":"273f5071bf0f6f6a33f23390f064c0ce40bbe9c63094496e22bf0f53aafd9352"} Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.055394 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.057272 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5f59b99c48-hfp75" event={"ID":"e02b5464-466b-4b2c-8b17-6f95eb820c4c","Type":"ContainerStarted","Data":"7e6dcc4c5f460e2f66c7e74bc9ad580111ae8d37999ce392fc4eada0650b6250"} Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.057417 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.060027 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-ff77b9795-l9tgr" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.063174 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-ff77b9795-l9tgr" event={"ID":"9824b1b0-4e68-4c49-98b8-69cfc57ed8ad","Type":"ContainerDied","Data":"16dd7213d1dc74cabf42d6dbedec0ff83601cb944cf578766b7a22bbb1f10f31"} Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.063239 4876 scope.go:117] "RemoveContainer" containerID="7fe0a4fd68238e9bf261cb93dcffc0197a5f2bca17694cc9a598ad308ed2c162" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.081648 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" podStartSLOduration=2.528754529 podStartE2EDuration="4.081622284s" podCreationTimestamp="2025-12-15 08:57:52 +0000 UTC" firstStartedPulling="2025-12-15 08:57:53.155434116 +0000 UTC m=+7598.726577027" lastFinishedPulling="2025-12-15 08:57:54.708301871 +0000 UTC m=+7600.279444782" observedRunningTime="2025-12-15 08:57:56.07577557 +0000 UTC m=+7601.646918491" watchObservedRunningTime="2025-12-15 08:57:56.081622284 +0000 UTC m=+7601.652765195" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.103763 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5f59b99c48-hfp75" podStartSLOduration=2.521171749 podStartE2EDuration="4.103738616s" podCreationTimestamp="2025-12-15 08:57:52 +0000 UTC" firstStartedPulling="2025-12-15 08:57:53.120373983 +0000 UTC m=+7598.691516894" lastFinishedPulling="2025-12-15 08:57:54.70294084 +0000 UTC m=+7600.274083761" observedRunningTime="2025-12-15 08:57:56.098694393 +0000 UTC m=+7601.669837304" watchObservedRunningTime="2025-12-15 08:57:56.103738616 +0000 UTC m=+7601.674881547" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.140012 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.149382 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-ff77b9795-l9tgr"] Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.249740 4876 scope.go:117] "RemoveContainer" containerID="48eeb68c4a0f84e029e01ea083a76e3f10443e2a9ce8c20f712977a2e69eb3bd" Dec 15 08:57:56 crc kubenswrapper[4876]: I1215 08:57:56.721566 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" path="/var/lib/kubelet/pods/9824b1b0-4e68-4c49-98b8-69cfc57ed8ad/volumes" Dec 15 08:58:03 crc kubenswrapper[4876]: I1215 08:58:03.746153 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:58:04 crc kubenswrapper[4876]: I1215 08:58:04.061007 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-5f59b99c48-hfp75" Dec 15 08:58:04 crc kubenswrapper[4876]: I1215 08:58:04.126523 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-6dd84dfc-l68t4" Dec 15 08:58:05 crc kubenswrapper[4876]: I1215 08:58:05.649524 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-78569d8b45-bcs6r" Dec 15 08:58:05 crc kubenswrapper[4876]: I1215 08:58:05.723578 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:58:05 crc kubenswrapper[4876]: I1215 08:58:05.723823 4876 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon-log" containerID="cri-o://1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a" gracePeriod=30 Dec 15 08:58:05 crc kubenswrapper[4876]: I1215 08:58:05.723938 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" containerID="cri-o://b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c" gracePeriod=30 Dec 15 08:58:09 crc kubenswrapper[4876]: I1215 08:58:09.187683 4876 generic.go:334] "Generic (PLEG): container finished" podID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerID="b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c" exitCode=0 Dec 15 08:58:09 crc kubenswrapper[4876]: I1215 08:58:09.187725 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerDied","Data":"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c"} Dec 15 08:58:10 crc kubenswrapper[4876]: I1215 08:58:10.442353 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.105:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.105:8080: connect: connection refused" Dec 15 08:58:12 crc kubenswrapper[4876]: I1215 08:58:12.429273 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-75495b79b7-z2zxg" Dec 15 08:58:20 crc kubenswrapper[4876]: I1215 08:58:20.441362 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.105:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.105:8080: connect: connection refused" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.742674 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz"] Dec 15 08:58:21 crc kubenswrapper[4876]: E1215 08:58:21.743250 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.743265 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" Dec 15 08:58:21 crc kubenswrapper[4876]: E1215 08:58:21.743302 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon-log" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.743309 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon-log" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.743522 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.743534 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9824b1b0-4e68-4c49-98b8-69cfc57ed8ad" containerName="horizon-log" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.745146 4876 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.748314 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.754222 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz"] Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.832540 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.832639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.833448 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8n94\" (UniqueName: \"kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.934860 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.935243 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.935451 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.935411 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8n94\" (UniqueName: 
\"kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.935695 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:21 crc kubenswrapper[4876]: I1215 08:58:21.959357 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8n94\" (UniqueName: \"kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:22 crc kubenswrapper[4876]: I1215 08:58:22.107076 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:22 crc kubenswrapper[4876]: I1215 08:58:22.534251 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz"] Dec 15 08:58:23 crc kubenswrapper[4876]: I1215 08:58:23.324024 4876 generic.go:334] "Generic (PLEG): container finished" podID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerID="82dcf4da13f4a605cace2c2509d39fe728e417d235289d0e8b5bc5c879fa4e71" exitCode=0 Dec 15 08:58:23 crc kubenswrapper[4876]: I1215 08:58:23.324158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" event={"ID":"80fe6462-83b6-4b61-a76e-f2db98998c80","Type":"ContainerDied","Data":"82dcf4da13f4a605cace2c2509d39fe728e417d235289d0e8b5bc5c879fa4e71"} Dec 15 08:58:23 crc kubenswrapper[4876]: I1215 08:58:23.324454 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" event={"ID":"80fe6462-83b6-4b61-a76e-f2db98998c80","Type":"ContainerStarted","Data":"980cd0358eafad893aa92a08e403d588f2ebb0ae61a589ca084241f499954303"} Dec 15 08:58:27 crc kubenswrapper[4876]: I1215 08:58:27.364270 4876 generic.go:334] "Generic (PLEG): container finished" podID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerID="e47488633d83dc7e8b2803d1ca9e2647e6edcb8a2cb556853752eac901bee756" exitCode=0 Dec 15 08:58:27 crc kubenswrapper[4876]: I1215 08:58:27.364348 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" event={"ID":"80fe6462-83b6-4b61-a76e-f2db98998c80","Type":"ContainerDied","Data":"e47488633d83dc7e8b2803d1ca9e2647e6edcb8a2cb556853752eac901bee756"} Dec 15 08:58:28 crc kubenswrapper[4876]: I1215 08:58:28.375575 4876 generic.go:334] "Generic (PLEG): container finished" podID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerID="b712d468bf631ba77a9a7feb559696497560c86d39b8e48350e351f5f3f32929" exitCode=0 Dec 15 08:58:28 crc kubenswrapper[4876]: 
I1215 08:58:28.375783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" event={"ID":"80fe6462-83b6-4b61-a76e-f2db98998c80","Type":"ContainerDied","Data":"b712d468bf631ba77a9a7feb559696497560c86d39b8e48350e351f5f3f32929"} Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.049283 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8340-account-create-update-shntv"] Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.059025 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-vtq2q"] Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.068860 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8340-account-create-update-shntv"] Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.079412 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-vtq2q"] Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.787720 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.898056 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8n94\" (UniqueName: \"kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94\") pod \"80fe6462-83b6-4b61-a76e-f2db98998c80\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.898276 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util\") pod \"80fe6462-83b6-4b61-a76e-f2db98998c80\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.898501 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle\") pod \"80fe6462-83b6-4b61-a76e-f2db98998c80\" (UID: \"80fe6462-83b6-4b61-a76e-f2db98998c80\") " Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.900258 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle" (OuterVolumeSpecName: "bundle") pod "80fe6462-83b6-4b61-a76e-f2db98998c80" (UID: "80fe6462-83b6-4b61-a76e-f2db98998c80"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.903923 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94" (OuterVolumeSpecName: "kube-api-access-c8n94") pod "80fe6462-83b6-4b61-a76e-f2db98998c80" (UID: "80fe6462-83b6-4b61-a76e-f2db98998c80"). InnerVolumeSpecName "kube-api-access-c8n94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:58:29 crc kubenswrapper[4876]: I1215 08:58:29.904081 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util" (OuterVolumeSpecName: "util") pod "80fe6462-83b6-4b61-a76e-f2db98998c80" (UID: "80fe6462-83b6-4b61-a76e-f2db98998c80"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.000610 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8n94\" (UniqueName: \"kubernetes.io/projected/80fe6462-83b6-4b61-a76e-f2db98998c80-kube-api-access-c8n94\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.000636 4876 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-util\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.000645 4876 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/80fe6462-83b6-4b61-a76e-f2db98998c80-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.398094 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" event={"ID":"80fe6462-83b6-4b61-a76e-f2db98998c80","Type":"ContainerDied","Data":"980cd0358eafad893aa92a08e403d588f2ebb0ae61a589ca084241f499954303"} Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.398476 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="980cd0358eafad893aa92a08e403d588f2ebb0ae61a589ca084241f499954303" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.398252 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.441938 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-67cc7b585c-xqmvf" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.105:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.105:8080: connect: connection refused" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.442072 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.717791 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="197123db-9e54-4e82-be91-7c531f7be3c1" path="/var/lib/kubelet/pods/197123db-9e54-4e82-be91-7c531f7be3c1/volumes" Dec 15 08:58:30 crc kubenswrapper[4876]: I1215 08:58:30.718620 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d29e395-e1dc-4668-8f5d-6e28693c1990" path="/var/lib/kubelet/pods/3d29e395-e1dc-4668-8f5d-6e28693c1990/volumes" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.319856 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.439933 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key\") pod \"08e91091-a77c-43e3-b8a8-a20b578ad95c\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.440671 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs\") pod \"08e91091-a77c-43e3-b8a8-a20b578ad95c\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.440734 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data\") pod \"08e91091-a77c-43e3-b8a8-a20b578ad95c\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.440779 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmmtq\" (UniqueName: \"kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq\") pod \"08e91091-a77c-43e3-b8a8-a20b578ad95c\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.440829 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts\") pod \"08e91091-a77c-43e3-b8a8-a20b578ad95c\" (UID: \"08e91091-a77c-43e3-b8a8-a20b578ad95c\") " Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.445008 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs" (OuterVolumeSpecName: "logs") pod "08e91091-a77c-43e3-b8a8-a20b578ad95c" (UID: "08e91091-a77c-43e3-b8a8-a20b578ad95c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.456342 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "08e91091-a77c-43e3-b8a8-a20b578ad95c" (UID: "08e91091-a77c-43e3-b8a8-a20b578ad95c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.456518 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq" (OuterVolumeSpecName: "kube-api-access-rmmtq") pod "08e91091-a77c-43e3-b8a8-a20b578ad95c" (UID: "08e91091-a77c-43e3-b8a8-a20b578ad95c"). InnerVolumeSpecName "kube-api-access-rmmtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.515828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data" (OuterVolumeSpecName: "config-data") pod "08e91091-a77c-43e3-b8a8-a20b578ad95c" (UID: "08e91091-a77c-43e3-b8a8-a20b578ad95c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.524709 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts" (OuterVolumeSpecName: "scripts") pod "08e91091-a77c-43e3-b8a8-a20b578ad95c" (UID: "08e91091-a77c-43e3-b8a8-a20b578ad95c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.526545 4876 generic.go:334] "Generic (PLEG): container finished" podID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerID="1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a" exitCode=137 Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.526591 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerDied","Data":"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a"} Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.526618 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67cc7b585c-xqmvf" event={"ID":"08e91091-a77c-43e3-b8a8-a20b578ad95c","Type":"ContainerDied","Data":"1e77350a88961697bf386abfc4e4581c5dd21d8b426bbd4ee3ecef341586299a"} Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.526634 4876 scope.go:117] "RemoveContainer" containerID="b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.526793 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67cc7b585c-xqmvf" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.542872 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08e91091-a77c-43e3-b8a8-a20b578ad95c-logs\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.542993 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.543059 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmmtq\" (UniqueName: \"kubernetes.io/projected/08e91091-a77c-43e3-b8a8-a20b578ad95c-kube-api-access-rmmtq\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.543134 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/08e91091-a77c-43e3-b8a8-a20b578ad95c-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.543196 4876 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/08e91091-a77c-43e3-b8a8-a20b578ad95c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.636553 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.663703 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-67cc7b585c-xqmvf"] Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.718303 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" 
path="/var/lib/kubelet/pods/08e91091-a77c-43e3-b8a8-a20b578ad95c/volumes" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.777672 4876 scope.go:117] "RemoveContainer" containerID="1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.838369 4876 scope.go:117] "RemoveContainer" containerID="b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c" Dec 15 08:58:36 crc kubenswrapper[4876]: E1215 08:58:36.839022 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c\": container with ID starting with b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c not found: ID does not exist" containerID="b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.839060 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c"} err="failed to get container status \"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c\": rpc error: code = NotFound desc = could not find container \"b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c\": container with ID starting with b96294482301a71562735bb65ed7e6f0549b0ff530b6cb042ac889c7b4bb733c not found: ID does not exist" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.839084 4876 scope.go:117] "RemoveContainer" containerID="1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a" Dec 15 08:58:36 crc kubenswrapper[4876]: E1215 08:58:36.843286 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a\": container with ID starting with 1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a not found: ID does not exist" containerID="1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a" Dec 15 08:58:36 crc kubenswrapper[4876]: I1215 08:58:36.843340 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a"} err="failed to get container status \"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a\": rpc error: code = NotFound desc = could not find container \"1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a\": container with ID starting with 1fc72f6fbfa26aac720b218848e077251d2b6caf4e3e9239eea07ad377b6230a not found: ID does not exist" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.194418 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52"] Dec 15 08:58:39 crc kubenswrapper[4876]: E1215 08:58:39.195572 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="pull" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195597 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="pull" Dec 15 08:58:39 crc kubenswrapper[4876]: E1215 08:58:39.195614 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon-log" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195622 4876 
state_mem.go:107] "Deleted CPUSet assignment" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon-log" Dec 15 08:58:39 crc kubenswrapper[4876]: E1215 08:58:39.195637 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195647 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" Dec 15 08:58:39 crc kubenswrapper[4876]: E1215 08:58:39.195679 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="extract" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195687 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="extract" Dec 15 08:58:39 crc kubenswrapper[4876]: E1215 08:58:39.195699 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="util" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195707 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="util" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195948 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon-log" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195972 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e91091-a77c-43e3-b8a8-a20b578ad95c" containerName="horizon" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.195995 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="80fe6462-83b6-4b61-a76e-f2db98998c80" containerName="extract" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.196825 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.199642 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-8mjw7" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.199673 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.200852 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.209374 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.294296 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6kcz\" (UniqueName: \"kubernetes.io/projected/806866ac-71bd-4a3d-813e-25cfe673c5ef-kube-api-access-m6kcz\") pod \"obo-prometheus-operator-668cf9dfbb-znp52\" (UID: \"806866ac-71bd-4a3d-813e-25cfe673c5ef\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.320830 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.322353 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.327086 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.327183 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-vzc7z" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.335023 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.348028 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.349312 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.372991 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.397389 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6kcz\" (UniqueName: \"kubernetes.io/projected/806866ac-71bd-4a3d-813e-25cfe673c5ef-kube-api-access-m6kcz\") pod \"obo-prometheus-operator-668cf9dfbb-znp52\" (UID: \"806866ac-71bd-4a3d-813e-25cfe673c5ef\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.397458 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.397493 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.423559 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6kcz\" (UniqueName: \"kubernetes.io/projected/806866ac-71bd-4a3d-813e-25cfe673c5ef-kube-api-access-m6kcz\") pod \"obo-prometheus-operator-668cf9dfbb-znp52\" (UID: \"806866ac-71bd-4a3d-813e-25cfe673c5ef\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.500087 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.500898 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.500991 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc 
kubenswrapper[4876]: I1215 08:58:39.501370 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.505350 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.515118 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/435444da-9b95-4e8b-8e29-f12c328cf54e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-svjfc\" (UID: \"435444da-9b95-4e8b-8e29-f12c328cf54e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.521930 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.551991 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7vbbj"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.555052 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.560013 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-swmg8" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.564500 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.578537 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7vbbj"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.610818 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.610996 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.616402 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.619696 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b76e3c5-1ab5-4f10-8d86-88084a1c678e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-86658f49c4-wwr94\" (UID: \"0b76e3c5-1ab5-4f10-8d86-88084a1c678e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.646555 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.677462 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.714285 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdtbl\" (UniqueName: \"kubernetes.io/projected/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-kube-api-access-zdtbl\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.714418 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.800897 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-948jf"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.802446 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.808636 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-9lbs9" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.812933 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-948jf"] Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.818265 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.818513 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdtbl\" (UniqueName: \"kubernetes.io/projected/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-kube-api-access-zdtbl\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.832741 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.840657 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdtbl\" (UniqueName: \"kubernetes.io/projected/faab3f29-aea5-4f55-af4f-0fa6cb5c1547-kube-api-access-zdtbl\") pod \"observability-operator-d8bb48f5d-7vbbj\" (UID: \"faab3f29-aea5-4f55-af4f-0fa6cb5c1547\") " pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.920809 
4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xs8r\" (UniqueName: \"kubernetes.io/projected/d0bafb85-b279-4dcd-88d8-5a077850fe11-kube-api-access-9xs8r\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:39 crc kubenswrapper[4876]: I1215 08:58:39.921298 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0bafb85-b279-4dcd-88d8-5a077850fe11-openshift-service-ca\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.023295 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xs8r\" (UniqueName: \"kubernetes.io/projected/d0bafb85-b279-4dcd-88d8-5a077850fe11-kube-api-access-9xs8r\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.023396 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0bafb85-b279-4dcd-88d8-5a077850fe11-openshift-service-ca\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.024590 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/d0bafb85-b279-4dcd-88d8-5a077850fe11-openshift-service-ca\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.045514 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xs8r\" (UniqueName: \"kubernetes.io/projected/d0bafb85-b279-4dcd-88d8-5a077850fe11-kube-api-access-9xs8r\") pod \"perses-operator-5446b9c989-948jf\" (UID: \"d0bafb85-b279-4dcd-88d8-5a077850fe11\") " pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.066697 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.135958 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.158136 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52"] Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.406931 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc"] Dec 15 08:58:40 crc kubenswrapper[4876]: W1215 08:58:40.409708 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod435444da_9b95_4e8b_8e29_f12c328cf54e.slice/crio-21ea7863c7088cf8c12a49cae1d0f596238e007c89c1f738b5867bc2cc0d5bbd WatchSource:0}: Error finding container 21ea7863c7088cf8c12a49cae1d0f596238e007c89c1f738b5867bc2cc0d5bbd: Status 404 returned error can't find the container with id 21ea7863c7088cf8c12a49cae1d0f596238e007c89c1f738b5867bc2cc0d5bbd Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.416862 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94"] Dec 15 08:58:40 crc kubenswrapper[4876]: W1215 08:58:40.423298 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b76e3c5_1ab5_4f10_8d86_88084a1c678e.slice/crio-4c262354f4682dfe78a41e69d2e13cc969ed56e06bcd2438c7a3a4ad2e51b6b3 WatchSource:0}: Error finding container 4c262354f4682dfe78a41e69d2e13cc969ed56e06bcd2438c7a3a4ad2e51b6b3: Status 404 returned error can't find the container with id 4c262354f4682dfe78a41e69d2e13cc969ed56e06bcd2438c7a3a4ad2e51b6b3 Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.595075 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" event={"ID":"0b76e3c5-1ab5-4f10-8d86-88084a1c678e","Type":"ContainerStarted","Data":"4c262354f4682dfe78a41e69d2e13cc969ed56e06bcd2438c7a3a4ad2e51b6b3"} Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.596463 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" event={"ID":"806866ac-71bd-4a3d-813e-25cfe673c5ef","Type":"ContainerStarted","Data":"188cb30f0886c528248963c10601451dc4e178d53ec9ceac73a38e63cf697cab"} Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.599098 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" event={"ID":"435444da-9b95-4e8b-8e29-f12c328cf54e","Type":"ContainerStarted","Data":"21ea7863c7088cf8c12a49cae1d0f596238e007c89c1f738b5867bc2cc0d5bbd"} Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.685812 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7vbbj"] Dec 15 08:58:40 crc kubenswrapper[4876]: W1215 08:58:40.689307 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfaab3f29_aea5_4f55_af4f_0fa6cb5c1547.slice/crio-5ecaf3991955b9394b4f6a3968da2258f5f3d579a8c9bf409a149612e150b43d WatchSource:0}: Error finding container 5ecaf3991955b9394b4f6a3968da2258f5f3d579a8c9bf409a149612e150b43d: Status 404 returned error can't find the container with id 5ecaf3991955b9394b4f6a3968da2258f5f3d579a8c9bf409a149612e150b43d Dec 15 08:58:40 crc 
kubenswrapper[4876]: I1215 08:58:40.809043 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-948jf"] Dec 15 08:58:40 crc kubenswrapper[4876]: W1215 08:58:40.809361 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd0bafb85_b279_4dcd_88d8_5a077850fe11.slice/crio-a5b737fb3645a8ec7c7631ce8571d4219b5306002def706ed2ec9e8adc7892ed WatchSource:0}: Error finding container a5b737fb3645a8ec7c7631ce8571d4219b5306002def706ed2ec9e8adc7892ed: Status 404 returned error can't find the container with id a5b737fb3645a8ec7c7631ce8571d4219b5306002def706ed2ec9e8adc7892ed Dec 15 08:58:40 crc kubenswrapper[4876]: I1215 08:58:40.811928 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 08:58:41 crc kubenswrapper[4876]: I1215 08:58:41.044739 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-8r24z"] Dec 15 08:58:41 crc kubenswrapper[4876]: I1215 08:58:41.051997 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-8r24z"] Dec 15 08:58:41 crc kubenswrapper[4876]: I1215 08:58:41.612811 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-948jf" event={"ID":"d0bafb85-b279-4dcd-88d8-5a077850fe11","Type":"ContainerStarted","Data":"a5b737fb3645a8ec7c7631ce8571d4219b5306002def706ed2ec9e8adc7892ed"} Dec 15 08:58:41 crc kubenswrapper[4876]: I1215 08:58:41.614788 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" event={"ID":"faab3f29-aea5-4f55-af4f-0fa6cb5c1547","Type":"ContainerStarted","Data":"5ecaf3991955b9394b4f6a3968da2258f5f3d579a8c9bf409a149612e150b43d"} Dec 15 08:58:42 crc kubenswrapper[4876]: I1215 08:58:42.734613 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83f17d8f-d2c6-4755-af05-832c7eaa4bcc" path="/var/lib/kubelet/pods/83f17d8f-d2c6-4755-af05-832c7eaa4bcc/volumes" Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.743876 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" event={"ID":"0b76e3c5-1ab5-4f10-8d86-88084a1c678e","Type":"ContainerStarted","Data":"f4ce23fd25adb74668f234abb0b6b84d0b6412bae13c277df5d842df118351d2"} Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.760775 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" event={"ID":"806866ac-71bd-4a3d-813e-25cfe673c5ef","Type":"ContainerStarted","Data":"fe0a52aafe9fcb75d55c63021ae2af284bf0528f5e547a99e1bc8e74b7fab828"} Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.776759 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-wwr94" podStartSLOduration=2.286999482 podStartE2EDuration="9.776742948s" podCreationTimestamp="2025-12-15 08:58:39 +0000 UTC" firstStartedPulling="2025-12-15 08:58:40.432811609 +0000 UTC m=+7646.003954520" lastFinishedPulling="2025-12-15 08:58:47.922555075 +0000 UTC m=+7653.493697986" observedRunningTime="2025-12-15 08:58:48.773302567 +0000 UTC m=+7654.344445498" watchObservedRunningTime="2025-12-15 08:58:48.776742948 +0000 UTC m=+7654.347885859" Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.779197 4876 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-operators/perses-operator-5446b9c989-948jf" event={"ID":"d0bafb85-b279-4dcd-88d8-5a077850fe11","Type":"ContainerStarted","Data":"f94cbabb40c23d9b7bdb87126481ee75e1be40026d4d56692268be52a242befd"} Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.780086 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.797617 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" event={"ID":"435444da-9b95-4e8b-8e29-f12c328cf54e","Type":"ContainerStarted","Data":"a30791bbf4bad5fa64645e3d2d127713a8cad588d6afec2cd46e3cc6eddbd97d"} Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.806689 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-znp52" podStartSLOduration=2.056028006 podStartE2EDuration="9.806672475s" podCreationTimestamp="2025-12-15 08:58:39 +0000 UTC" firstStartedPulling="2025-12-15 08:58:40.174285468 +0000 UTC m=+7645.745428379" lastFinishedPulling="2025-12-15 08:58:47.924929937 +0000 UTC m=+7653.496072848" observedRunningTime="2025-12-15 08:58:48.805253938 +0000 UTC m=+7654.376396869" watchObservedRunningTime="2025-12-15 08:58:48.806672475 +0000 UTC m=+7654.377815386" Dec 15 08:58:48 crc kubenswrapper[4876]: I1215 08:58:48.836252 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-86658f49c4-svjfc" podStartSLOduration=2.321585823 podStartE2EDuration="9.836077399s" podCreationTimestamp="2025-12-15 08:58:39 +0000 UTC" firstStartedPulling="2025-12-15 08:58:40.413509832 +0000 UTC m=+7645.984652743" lastFinishedPulling="2025-12-15 08:58:47.928001408 +0000 UTC m=+7653.499144319" observedRunningTime="2025-12-15 08:58:48.834156979 +0000 UTC m=+7654.405299920" watchObservedRunningTime="2025-12-15 08:58:48.836077399 +0000 UTC m=+7654.407220320" Dec 15 08:58:52 crc kubenswrapper[4876]: I1215 08:58:52.955183 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" event={"ID":"faab3f29-aea5-4f55-af4f-0fa6cb5c1547","Type":"ContainerStarted","Data":"e7cb2486db48e9cf9e715fcef81f048551724948aa47cd05aaa7927b634cad1a"} Dec 15 08:58:52 crc kubenswrapper[4876]: I1215 08:58:52.955770 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:52 crc kubenswrapper[4876]: I1215 08:58:52.971891 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" Dec 15 08:58:52 crc kubenswrapper[4876]: I1215 08:58:52.978905 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-7vbbj" podStartSLOduration=2.382324371 podStartE2EDuration="13.978882446s" podCreationTimestamp="2025-12-15 08:58:39 +0000 UTC" firstStartedPulling="2025-12-15 08:58:40.692662386 +0000 UTC m=+7646.263805297" lastFinishedPulling="2025-12-15 08:58:52.289220461 +0000 UTC m=+7657.860363372" observedRunningTime="2025-12-15 08:58:52.975742874 +0000 UTC m=+7658.546885805" watchObservedRunningTime="2025-12-15 08:58:52.978882446 +0000 UTC m=+7658.550025367" Dec 15 08:58:52 crc kubenswrapper[4876]: I1215 08:58:52.987564 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-948jf" podStartSLOduration=6.874464379 podStartE2EDuration="13.987539184s" podCreationTimestamp="2025-12-15 08:58:39 +0000 UTC" firstStartedPulling="2025-12-15 08:58:40.811707318 +0000 UTC m=+7646.382850229" lastFinishedPulling="2025-12-15 08:58:47.924782123 +0000 UTC m=+7653.495925034" observedRunningTime="2025-12-15 08:58:48.868634065 +0000 UTC m=+7654.439776976" watchObservedRunningTime="2025-12-15 08:58:52.987539184 +0000 UTC m=+7658.558682105" Dec 15 08:59:00 crc kubenswrapper[4876]: I1215 08:59:00.139712 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-948jf" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.395036 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.395681 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" containerName="openstackclient" containerID="cri-o://75fddfeed143e85cdbcb6fce3c96effc4face30468aa3851ae1994ec7f4f7191" gracePeriod=2 Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.405390 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.439724 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 15 08:59:03 crc kubenswrapper[4876]: E1215 08:59:03.440128 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" containerName="openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.440145 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" containerName="openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.440337 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" containerName="openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.440999 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.447299 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" podUID="b7f9b10e-34c0-4e6b-9732-ca34348c4eac" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.481234 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.515373 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv5b8\" (UniqueName: \"kubernetes.io/projected/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-kube-api-access-zv5b8\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.515595 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.515625 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.617930 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.617985 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.618116 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv5b8\" (UniqueName: \"kubernetes.io/projected/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-kube-api-access-zv5b8\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.620407 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.625760 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc 
kubenswrapper[4876]: I1215 08:59:03.646849 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv5b8\" (UniqueName: \"kubernetes.io/projected/b7f9b10e-34c0-4e6b-9732-ca34348c4eac-kube-api-access-zv5b8\") pod \"openstackclient\" (UID: \"b7f9b10e-34c0-4e6b-9732-ca34348c4eac\") " pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.664968 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.670236 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.676466 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-vww9n" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.680354 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.762522 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.824643 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49pt2\" (UniqueName: \"kubernetes.io/projected/7188e583-ec09-4936-b3bd-e66a6f0c7c3e-kube-api-access-49pt2\") pod \"kube-state-metrics-0\" (UID: \"7188e583-ec09-4936-b3bd-e66a6f0c7c3e\") " pod="openstack/kube-state-metrics-0" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.926099 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49pt2\" (UniqueName: \"kubernetes.io/projected/7188e583-ec09-4936-b3bd-e66a6f0c7c3e-kube-api-access-49pt2\") pod \"kube-state-metrics-0\" (UID: \"7188e583-ec09-4936-b3bd-e66a6f0c7c3e\") " pod="openstack/kube-state-metrics-0" Dec 15 08:59:03 crc kubenswrapper[4876]: I1215 08:59:03.977266 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49pt2\" (UniqueName: \"kubernetes.io/projected/7188e583-ec09-4936-b3bd-e66a6f0c7c3e-kube-api-access-49pt2\") pod \"kube-state-metrics-0\" (UID: \"7188e583-ec09-4936-b3bd-e66a6f0c7c3e\") " pod="openstack/kube-state-metrics-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.068973 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.229480 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-qv5gc"] Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.242876 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-qv5gc"] Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.564762 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.566909 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.570607 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.570801 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-njc4g" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.570918 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.570929 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.571273 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.597748 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671363 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671436 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671585 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671610 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671634 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671703 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69v9t\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-kube-api-access-69v9t\") pod 
\"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.671734 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.772261 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="057d3d8d-8e37-4305-82ec-b0e64b327c56" path="/var/lib/kubelet/pods/057d3d8d-8e37-4305-82ec-b0e64b327c56/volumes" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.772989 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773042 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773094 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773278 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773311 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773335 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773404 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69v9t\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-kube-api-access-69v9t\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.773436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-tls-assets\") 
pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.777441 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.797938 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.811832 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.817945 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.835007 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.856124 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69v9t\" (UniqueName: \"kubernetes.io/projected/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-kube-api-access-69v9t\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:04 crc kubenswrapper[4876]: I1215 08:59:04.856405 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b6f1b67c-1f7c-4748-8530-cf99c3859e6d-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"b6f1b67c-1f7c-4748-8530-cf99c3859e6d\") " pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.002637 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.089079 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e591-account-create-update-skxc6"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.096174 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b7f9b10e-34c0-4e6b-9732-ca34348c4eac","Type":"ContainerStarted","Data":"c220140af448048ebcaa7561b590ab65939b8e23338b1530981a969f06dcb8be"} Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.123218 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e591-account-create-update-skxc6"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.165251 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.202485 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.204802 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.212215 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.213063 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.213219 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.213336 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.226583 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rs2t4" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.231172 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.245997 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289251 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289575 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289630 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: 
\"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl6fp\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-kube-api-access-pl6fp\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289682 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289744 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8035bbab-8307-4f51-85e4-19969551c789\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8035bbab-8307-4f51-85e4-19969551c789\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289806 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.289842 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.393995 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8035bbab-8307-4f51-85e4-19969551c789\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8035bbab-8307-4f51-85e4-19969551c789\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.395021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.399813 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.395223 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.404970 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.405025 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.405165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.405241 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl6fp\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-kube-api-access-pl6fp\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.405271 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.413449 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.416931 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.417470 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: 
I1215 08:59:05.420735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.432877 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.457287 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl6fp\" (UniqueName: \"kubernetes.io/projected/6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e-kube-api-access-pl6fp\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.508144 4876 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.508206 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8035bbab-8307-4f51-85e4-19969551c789\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8035bbab-8307-4f51-85e4-19969551c789\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/557ece4bc1d191186116658406a8ddc3db6c91b97211e9ce3940e98fbcdce9a5/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.600327 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8035bbab-8307-4f51-85e4-19969551c789\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8035bbab-8307-4f51-85e4-19969551c789\") pod \"prometheus-metric-storage-0\" (UID: \"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e\") " pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:05 crc kubenswrapper[4876]: E1215 08:59:05.739639 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac4bc4be_fe90_48ae_ac23_83fb5870105a.slice/crio-conmon-75fddfeed143e85cdbcb6fce3c96effc4face30468aa3851ae1994ec7f4f7191.scope\": RecentStats: unable to find data in memory cache]" Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.821396 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 15 08:59:05 crc kubenswrapper[4876]: I1215 08:59:05.884934 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.122063 4876 generic.go:334] "Generic (PLEG): container finished" podID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" containerID="75fddfeed143e85cdbcb6fce3c96effc4face30468aa3851ae1994ec7f4f7191" exitCode=137 Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.126235 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"b6f1b67c-1f7c-4748-8530-cf99c3859e6d","Type":"ContainerStarted","Data":"d13eda10b15d3cc17a28744037b5b4c15be9ae7bd35d36d1a5bc8d431b6de5c3"} Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.131971 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7188e583-ec09-4936-b3bd-e66a6f0c7c3e","Type":"ContainerStarted","Data":"2c8e597b67f5a3efd709ec903e07fd922f243d144d5e4ac9d9b9c05e6617b733"} Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.137324 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b7f9b10e-34c0-4e6b-9732-ca34348c4eac","Type":"ContainerStarted","Data":"f68a337c2371c90921c85202d85caa4cba0c592e914706b24184a062ee548a0d"} Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.167590 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.167568109 podStartE2EDuration="3.167568109s" podCreationTimestamp="2025-12-15 08:59:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 08:59:06.152603036 +0000 UTC m=+7671.723745947" watchObservedRunningTime="2025-12-15 08:59:06.167568109 +0000 UTC m=+7671.738711020" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.377537 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.382023 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" podUID="b7f9b10e-34c0-4e6b-9732-ca34348c4eac" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.400637 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 15 08:59:06 crc kubenswrapper[4876]: W1215 08:59:06.404736 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6821bc38_0d6b_4c4d_aa4a_6b7da625ba3e.slice/crio-76c325f5c7b3485b1d6c423fb4f190f6f3752354bdc3e119ac5790f5779ee400 WatchSource:0}: Error finding container 76c325f5c7b3485b1d6c423fb4f190f6f3752354bdc3e119ac5790f5779ee400: Status 404 returned error can't find the container with id 76c325f5c7b3485b1d6c423fb4f190f6f3752354bdc3e119ac5790f5779ee400 Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.435477 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config\") pod \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.435708 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78x7k\" (UniqueName: \"kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k\") pod \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.435793 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret\") pod \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\" (UID: \"ac4bc4be-fe90-48ae-ac23-83fb5870105a\") " Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.463066 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k" (OuterVolumeSpecName: "kube-api-access-78x7k") pod "ac4bc4be-fe90-48ae-ac23-83fb5870105a" (UID: "ac4bc4be-fe90-48ae-ac23-83fb5870105a"). InnerVolumeSpecName "kube-api-access-78x7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.483939 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "ac4bc4be-fe90-48ae-ac23-83fb5870105a" (UID: "ac4bc4be-fe90-48ae-ac23-83fb5870105a"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.499607 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "ac4bc4be-fe90-48ae-ac23-83fb5870105a" (UID: "ac4bc4be-fe90-48ae-ac23-83fb5870105a"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.537587 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78x7k\" (UniqueName: \"kubernetes.io/projected/ac4bc4be-fe90-48ae-ac23-83fb5870105a-kube-api-access-78x7k\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.537631 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.537645 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac4bc4be-fe90-48ae-ac23-83fb5870105a-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.717244 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" path="/var/lib/kubelet/pods/ac4bc4be-fe90-48ae-ac23-83fb5870105a/volumes" Dec 15 08:59:06 crc kubenswrapper[4876]: I1215 08:59:06.718543 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43b8605-b772-4173-9e18-9b9439fa45e3" path="/var/lib/kubelet/pods/e43b8605-b772-4173-9e18-9b9439fa45e3/volumes" Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.167874 4876 scope.go:117] "RemoveContainer" containerID="75fddfeed143e85cdbcb6fce3c96effc4face30468aa3851ae1994ec7f4f7191" Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.168074 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.173450 4876 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="ac4bc4be-fe90-48ae-ac23-83fb5870105a" podUID="b7f9b10e-34c0-4e6b-9732-ca34348c4eac" Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.193929 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7188e583-ec09-4936-b3bd-e66a6f0c7c3e","Type":"ContainerStarted","Data":"eed964f6b92f81d0c00fc54dd0af660226e22caa29419af0a028c4e237c8af10"} Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.195186 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.221083 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerStarted","Data":"76c325f5c7b3485b1d6c423fb4f190f6f3752354bdc3e119ac5790f5779ee400"} Dec 15 08:59:07 crc kubenswrapper[4876]: I1215 08:59:07.249863 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.742836165 podStartE2EDuration="4.249843524s" podCreationTimestamp="2025-12-15 08:59:03 +0000 UTC" firstStartedPulling="2025-12-15 08:59:05.284203169 +0000 UTC m=+7670.855346080" lastFinishedPulling="2025-12-15 08:59:06.791210528 +0000 UTC m=+7672.362353439" observedRunningTime="2025-12-15 08:59:07.232721843 +0000 UTC m=+7672.803864764" watchObservedRunningTime="2025-12-15 08:59:07.249843524 +0000 UTC m=+7672.820986435" Dec 15 08:59:12 crc kubenswrapper[4876]: I1215 08:59:12.294622 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerStarted","Data":"c135ca49bf7b528bcdfc2d5ffd8b35c301b39e6cc69495073f3efa4552b6b191"} Dec 15 08:59:12 crc kubenswrapper[4876]: I1215 08:59:12.316271 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"b6f1b67c-1f7c-4748-8530-cf99c3859e6d","Type":"ContainerStarted","Data":"9b718ffbd0770e2651f8d6c36950b094b7f8f00d9d6e4aaa8a1ed9991b9a1929"} Dec 15 08:59:14 crc kubenswrapper[4876]: I1215 08:59:14.041390 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-pf7pp"] Dec 15 08:59:14 crc kubenswrapper[4876]: I1215 08:59:14.051554 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-pf7pp"] Dec 15 08:59:14 crc kubenswrapper[4876]: I1215 08:59:14.074117 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 15 08:59:14 crc kubenswrapper[4876]: I1215 08:59:14.721542 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3298a876-f36d-4d28-8203-beec177942e8" path="/var/lib/kubelet/pods/3298a876-f36d-4d28-8203-beec177942e8/volumes" Dec 15 08:59:18 crc kubenswrapper[4876]: I1215 08:59:18.370243 4876 generic.go:334] "Generic (PLEG): container finished" podID="b6f1b67c-1f7c-4748-8530-cf99c3859e6d" containerID="9b718ffbd0770e2651f8d6c36950b094b7f8f00d9d6e4aaa8a1ed9991b9a1929" exitCode=0 Dec 15 08:59:18 crc kubenswrapper[4876]: I1215 08:59:18.370327 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"b6f1b67c-1f7c-4748-8530-cf99c3859e6d","Type":"ContainerDied","Data":"9b718ffbd0770e2651f8d6c36950b094b7f8f00d9d6e4aaa8a1ed9991b9a1929"} Dec 15 08:59:19 crc kubenswrapper[4876]: I1215 08:59:19.381731 4876 generic.go:334] "Generic (PLEG): container finished" podID="6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e" containerID="c135ca49bf7b528bcdfc2d5ffd8b35c301b39e6cc69495073f3efa4552b6b191" exitCode=0 Dec 15 08:59:19 crc kubenswrapper[4876]: I1215 08:59:19.381837 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerDied","Data":"c135ca49bf7b528bcdfc2d5ffd8b35c301b39e6cc69495073f3efa4552b6b191"} Dec 15 08:59:21 crc kubenswrapper[4876]: I1215 08:59:21.452035 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"b6f1b67c-1f7c-4748-8530-cf99c3859e6d","Type":"ContainerStarted","Data":"7505097674c71de4cf2762265ee662a7341b0670c85a426622f4c6b887b385a6"} Dec 15 08:59:24 crc kubenswrapper[4876]: I1215 08:59:24.480341 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"b6f1b67c-1f7c-4748-8530-cf99c3859e6d","Type":"ContainerStarted","Data":"4ba13cbb87fa75aa337972c19a2591bf35ea6a3488d37215b8a5432c4b3b0977"} Dec 15 08:59:24 crc kubenswrapper[4876]: I1215 08:59:24.480929 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:24 crc kubenswrapper[4876]: I1215 08:59:24.485486 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 15 08:59:24 crc kubenswrapper[4876]: I1215 08:59:24.504343 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.168971116 podStartE2EDuration="20.504323859s" podCreationTimestamp="2025-12-15 08:59:04 +0000 UTC" firstStartedPulling="2025-12-15 08:59:05.825708455 +0000 UTC m=+7671.396851366" lastFinishedPulling="2025-12-15 08:59:21.161061198 +0000 UTC m=+7686.732204109" observedRunningTime="2025-12-15 08:59:24.502270195 +0000 UTC m=+7690.073413126" watchObservedRunningTime="2025-12-15 08:59:24.504323859 +0000 UTC m=+7690.075466780" Dec 15 08:59:25 crc kubenswrapper[4876]: I1215 08:59:25.382635 4876 scope.go:117] "RemoveContainer" containerID="11f5811760297b22c6d27f33c565be3bc8328c4da29920e6b3029722b866ddfe" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.323375 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.324207 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.684526 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.686529 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.719835 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.781294 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brbwj\" (UniqueName: \"kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.781676 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.781752 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.883552 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " 
pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.883789 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brbwj\" (UniqueName: \"kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.883827 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.884050 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:27 crc kubenswrapper[4876]: I1215 08:59:27.884175 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.116735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brbwj\" (UniqueName: \"kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj\") pod \"community-operators-mv2h5\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.331699 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.399345 4876 scope.go:117] "RemoveContainer" containerID="dc4a820d75ff6692477b255b33ff62ade3561aa2ecbb69442ba67fd6fefcc954" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.534926 4876 scope.go:117] "RemoveContainer" containerID="dc733a890652e872bd97e21c9c6a0b252cd47fa7178409e67a97a5dee6c941bd" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.575944 4876 scope.go:117] "RemoveContainer" containerID="33dd809b89f1bfe605f93f05022ff7a891bd1d14a3b297976f6c0aa5d01202cc" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.657143 4876 scope.go:117] "RemoveContainer" containerID="34121a2a97e30f7666a49f517158304048f0f49dd044c794c0664b330eb16489" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.685276 4876 scope.go:117] "RemoveContainer" containerID="dcd679b38ab4d7a47a0d07234daa4da9c877934d55720fe9f80c2c80e06bdafe" Dec 15 08:59:28 crc kubenswrapper[4876]: I1215 08:59:28.980494 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:28 crc kubenswrapper[4876]: W1215 08:59:28.989360 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc840d89b_b546_4e6a_a7df_742dd265319a.slice/crio-f739624408ab5e2f1302a1eca26939c1a3a8285ba388c18e9c104d91b1d6691f WatchSource:0}: Error finding container f739624408ab5e2f1302a1eca26939c1a3a8285ba388c18e9c104d91b1d6691f: Status 404 returned error can't find the container with id f739624408ab5e2f1302a1eca26939c1a3a8285ba388c18e9c104d91b1d6691f Dec 15 08:59:29 crc kubenswrapper[4876]: I1215 08:59:29.536082 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerStarted","Data":"0643e9b9b30fb03b77033ffa00681ea90283e1863f186923413d6c0749428127"} Dec 15 08:59:29 crc kubenswrapper[4876]: I1215 08:59:29.537668 4876 generic.go:334] "Generic (PLEG): container finished" podID="c840d89b-b546-4e6a-a7df-742dd265319a" containerID="3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853" exitCode=0 Dec 15 08:59:29 crc kubenswrapper[4876]: I1215 08:59:29.537719 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerDied","Data":"3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853"} Dec 15 08:59:29 crc kubenswrapper[4876]: I1215 08:59:29.537767 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerStarted","Data":"f739624408ab5e2f1302a1eca26939c1a3a8285ba388c18e9c104d91b1d6691f"} Dec 15 08:59:32 crc kubenswrapper[4876]: I1215 08:59:32.569901 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerStarted","Data":"32dfba84bc5801e2f047560f01097b9fd5f7f555c1a95c80c07e00dfd6bdbe98"} Dec 15 08:59:32 crc kubenswrapper[4876]: I1215 08:59:32.572822 4876 generic.go:334] "Generic (PLEG): container finished" podID="c840d89b-b546-4e6a-a7df-742dd265319a" containerID="27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093" exitCode=0 Dec 15 08:59:32 crc kubenswrapper[4876]: I1215 08:59:32.572870 4876 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerDied","Data":"27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093"} Dec 15 08:59:34 crc kubenswrapper[4876]: I1215 08:59:34.594229 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerStarted","Data":"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3"} Dec 15 08:59:34 crc kubenswrapper[4876]: I1215 08:59:34.617264 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mv2h5" podStartSLOduration=3.60017885 podStartE2EDuration="7.617244769s" podCreationTimestamp="2025-12-15 08:59:27 +0000 UTC" firstStartedPulling="2025-12-15 08:59:29.539895244 +0000 UTC m=+7695.111038155" lastFinishedPulling="2025-12-15 08:59:33.556961163 +0000 UTC m=+7699.128104074" observedRunningTime="2025-12-15 08:59:34.609387052 +0000 UTC m=+7700.180529953" watchObservedRunningTime="2025-12-15 08:59:34.617244769 +0000 UTC m=+7700.188387670" Dec 15 08:59:36 crc kubenswrapper[4876]: I1215 08:59:36.615262 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e","Type":"ContainerStarted","Data":"d69186a2a784eaad8640d919b5dede1a536bb3c62dd08af42474d0932b7b8c13"} Dec 15 08:59:36 crc kubenswrapper[4876]: I1215 08:59:36.643858 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=2.967819963 podStartE2EDuration="32.643821497s" podCreationTimestamp="2025-12-15 08:59:04 +0000 UTC" firstStartedPulling="2025-12-15 08:59:06.406643269 +0000 UTC m=+7671.977786170" lastFinishedPulling="2025-12-15 08:59:36.082644793 +0000 UTC m=+7701.653787704" observedRunningTime="2025-12-15 08:59:36.637063629 +0000 UTC m=+7702.208206550" watchObservedRunningTime="2025-12-15 08:59:36.643821497 +0000 UTC m=+7702.214964408" Dec 15 08:59:38 crc kubenswrapper[4876]: I1215 08:59:38.332725 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:38 crc kubenswrapper[4876]: I1215 08:59:38.333318 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:38 crc kubenswrapper[4876]: I1215 08:59:38.377584 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:38 crc kubenswrapper[4876]: I1215 08:59:38.677387 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:39 crc kubenswrapper[4876]: I1215 08:59:39.468409 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:40 crc kubenswrapper[4876]: I1215 08:59:40.649910 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mv2h5" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="registry-server" containerID="cri-o://743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3" gracePeriod=2 Dec 15 08:59:40 crc kubenswrapper[4876]: I1215 08:59:40.896554 4876 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.200883 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.355683 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content\") pod \"c840d89b-b546-4e6a-a7df-742dd265319a\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.355759 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities\") pod \"c840d89b-b546-4e6a-a7df-742dd265319a\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.355837 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brbwj\" (UniqueName: \"kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj\") pod \"c840d89b-b546-4e6a-a7df-742dd265319a\" (UID: \"c840d89b-b546-4e6a-a7df-742dd265319a\") " Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.356935 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities" (OuterVolumeSpecName: "utilities") pod "c840d89b-b546-4e6a-a7df-742dd265319a" (UID: "c840d89b-b546-4e6a-a7df-742dd265319a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.369029 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj" (OuterVolumeSpecName: "kube-api-access-brbwj") pod "c840d89b-b546-4e6a-a7df-742dd265319a" (UID: "c840d89b-b546-4e6a-a7df-742dd265319a"). InnerVolumeSpecName "kube-api-access-brbwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.412767 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c840d89b-b546-4e6a-a7df-742dd265319a" (UID: "c840d89b-b546-4e6a-a7df-742dd265319a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.458277 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.458312 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c840d89b-b546-4e6a-a7df-742dd265319a-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.458322 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brbwj\" (UniqueName: \"kubernetes.io/projected/c840d89b-b546-4e6a-a7df-742dd265319a-kube-api-access-brbwj\") on node \"crc\" DevicePath \"\"" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.661081 4876 generic.go:334] "Generic (PLEG): container finished" podID="c840d89b-b546-4e6a-a7df-742dd265319a" containerID="743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3" exitCode=0 Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.661139 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerDied","Data":"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3"} Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.661166 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mv2h5" event={"ID":"c840d89b-b546-4e6a-a7df-742dd265319a","Type":"ContainerDied","Data":"f739624408ab5e2f1302a1eca26939c1a3a8285ba388c18e9c104d91b1d6691f"} Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.661183 4876 scope.go:117] "RemoveContainer" containerID="743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.661193 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mv2h5" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.684702 4876 scope.go:117] "RemoveContainer" containerID="27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.716045 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.735427 4876 scope.go:117] "RemoveContainer" containerID="3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.740997 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mv2h5"] Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.757551 4876 scope.go:117] "RemoveContainer" containerID="743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3" Dec 15 08:59:41 crc kubenswrapper[4876]: E1215 08:59:41.758061 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3\": container with ID starting with 743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3 not found: ID does not exist" containerID="743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.758115 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3"} err="failed to get container status \"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3\": rpc error: code = NotFound desc = could not find container \"743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3\": container with ID starting with 743a5cea54140ad30e011fff13391ca375895eb311f78efc446b467dc2b74ec3 not found: ID does not exist" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.758138 4876 scope.go:117] "RemoveContainer" containerID="27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093" Dec 15 08:59:41 crc kubenswrapper[4876]: E1215 08:59:41.758531 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093\": container with ID starting with 27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093 not found: ID does not exist" containerID="27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.758556 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093"} err="failed to get container status \"27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093\": rpc error: code = NotFound desc = could not find container \"27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093\": container with ID starting with 27129ddcc768a001b21da93e1e5c3f62638ebee46903493963c5272f099f5093 not found: ID does not exist" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.758570 4876 scope.go:117] "RemoveContainer" containerID="3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853" Dec 15 08:59:41 crc kubenswrapper[4876]: E1215 08:59:41.758804 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853\": container with ID starting with 3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853 not found: ID does not exist" containerID="3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853" Dec 15 08:59:41 crc kubenswrapper[4876]: I1215 08:59:41.758835 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853"} err="failed to get container status \"3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853\": rpc error: code = NotFound desc = could not find container \"3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853\": container with ID starting with 3b9b5b7ffeafbf25f86a7baf08a02988eaae4ea5b5517bb87472631bf8858853 not found: ID does not exist" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.065625 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 08:59:42 crc kubenswrapper[4876]: E1215 08:59:42.066264 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="registry-server" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.066284 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="registry-server" Dec 15 08:59:42 crc kubenswrapper[4876]: E1215 08:59:42.066293 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="extract-utilities" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.066299 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="extract-utilities" Dec 15 08:59:42 crc kubenswrapper[4876]: E1215 08:59:42.066336 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="extract-content" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.066342 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="extract-content" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.066504 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" containerName="registry-server" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.069398 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.071962 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.072082 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.086359 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.172780 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqpqz\" (UniqueName: \"kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.172845 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.172866 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.172880 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.173019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.173093 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.173166 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.274903 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqpqz\" (UniqueName: \"kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: 
I1215 08:59:42.274970 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.275016 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.275045 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.275601 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.275986 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.276074 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.276142 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.276447 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.280267 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.280806 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.281466 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.281481 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.293246 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqpqz\" (UniqueName: \"kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz\") pod \"ceilometer-0\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.398070 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.718027 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c840d89b-b546-4e6a-a7df-742dd265319a" path="/var/lib/kubelet/pods/c840d89b-b546-4e6a-a7df-742dd265319a/volumes" Dec 15 08:59:42 crc kubenswrapper[4876]: I1215 08:59:42.950966 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 08:59:42 crc kubenswrapper[4876]: W1215 08:59:42.965365 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70e73b46_01ac_4f61_9927_f90698e68866.slice/crio-3c2e9e384ef0f7e369dc51700b5e449d39b5aa31840cccacae597f6a17264cf6 WatchSource:0}: Error finding container 3c2e9e384ef0f7e369dc51700b5e449d39b5aa31840cccacae597f6a17264cf6: Status 404 returned error can't find the container with id 3c2e9e384ef0f7e369dc51700b5e449d39b5aa31840cccacae597f6a17264cf6 Dec 15 08:59:43 crc kubenswrapper[4876]: I1215 08:59:43.684761 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerStarted","Data":"3c2e9e384ef0f7e369dc51700b5e449d39b5aa31840cccacae597f6a17264cf6"} Dec 15 08:59:48 crc kubenswrapper[4876]: I1215 08:59:48.743880 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerStarted","Data":"6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1"} Dec 15 08:59:49 crc kubenswrapper[4876]: I1215 08:59:49.754942 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerStarted","Data":"68d7c1791a7704e8d8aa9a451458e780a13836d03dca16ab65e310c344ced490"} Dec 15 08:59:50 crc kubenswrapper[4876]: I1215 08:59:50.885921 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:50 crc kubenswrapper[4876]: I1215 08:59:50.889722 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:51 crc kubenswrapper[4876]: I1215 08:59:51.783581 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerStarted","Data":"62965a326bc37bb46dc030676c87e17319ce6dcfc9f650ce0da76b5b8a236d6e"} Dec 15 08:59:51 crc kubenswrapper[4876]: I1215 08:59:51.785053 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.048766 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nvqqs"] Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.061162 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-34c5-account-create-update-vs5gb"] Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.072361 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nvqqs"] Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.081698 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-34c5-account-create-update-vs5gb"] Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.829339 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerStarted","Data":"d28b2b05906d85e75013210282360d4c91e775a51c541a1c69caf8f27df19617"} Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.831266 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 08:59:55 crc kubenswrapper[4876]: I1215 08:59:55.856297 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.215712144 podStartE2EDuration="13.856279416s" podCreationTimestamp="2025-12-15 08:59:42 +0000 UTC" firstStartedPulling="2025-12-15 08:59:42.967846542 +0000 UTC m=+7708.538989453" lastFinishedPulling="2025-12-15 08:59:54.608413814 +0000 UTC m=+7720.179556725" observedRunningTime="2025-12-15 08:59:55.846900469 +0000 UTC m=+7721.418043390" watchObservedRunningTime="2025-12-15 08:59:55.856279416 +0000 UTC m=+7721.427422317" Dec 15 08:59:56 crc kubenswrapper[4876]: I1215 08:59:56.719177 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="687b9ec8-76f9-4e67-9dc6-f55efb50f9f6" path="/var/lib/kubelet/pods/687b9ec8-76f9-4e67-9dc6-f55efb50f9f6/volumes" Dec 15 08:59:56 crc kubenswrapper[4876]: I1215 08:59:56.719909 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb7c283e-6fad-4c7f-bfb7-703e63bf9be4" path="/var/lib/kubelet/pods/eb7c283e-6fad-4c7f-bfb7-703e63bf9be4/volumes" Dec 15 08:59:57 crc kubenswrapper[4876]: I1215 08:59:57.323002 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 08:59:57 crc kubenswrapper[4876]: I1215 08:59:57.323385 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.627415 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-qzrjp"] Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.629294 4876 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.637272 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-qzrjp"] Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.727824 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-3647-account-create-update-vw4kb"] Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.729035 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.732359 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.748512 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-3647-account-create-update-vw4kb"] Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.761804 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.762005 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df89h\" (UniqueName: \"kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.863485 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5prrr\" (UniqueName: \"kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.863615 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.863662 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df89h\" (UniqueName: \"kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.863770 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.864557 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.880900 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df89h\" (UniqueName: \"kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h\") pod \"aodh-db-create-qzrjp\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.960240 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-qzrjp" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.965518 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5prrr\" (UniqueName: \"kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.965821 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.966596 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 08:59:59 crc kubenswrapper[4876]: I1215 08:59:59.984307 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5prrr\" (UniqueName: \"kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr\") pod \"aodh-3647-account-create-update-vw4kb\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.051484 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.178122 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v"] Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.186404 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.195983 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.196356 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.202800 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v"] Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.278610 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.278798 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.278854 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgs86\" (UniqueName: \"kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.380354 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.380417 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgs86\" (UniqueName: \"kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.380549 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.381579 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume\") pod 
\"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.384615 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.396193 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgs86\" (UniqueName: \"kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86\") pod \"collect-profiles-29429820-x8l9v\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:00 crc kubenswrapper[4876]: I1215 09:00:00.558584 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:01 crc kubenswrapper[4876]: W1215 09:00:01.060331 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbd6be73_ea8e_44e0_a951_970e42acf3cf.slice/crio-7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1 WatchSource:0}: Error finding container 7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1: Status 404 returned error can't find the container with id 7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1 Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.061574 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-3647-account-create-update-vw4kb"] Dec 15 09:00:01 crc kubenswrapper[4876]: W1215 09:00:01.064213 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod547569de_dc02_4f05_b8a6_c85c2ae2bf7c.slice/crio-24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa WatchSource:0}: Error finding container 24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa: Status 404 returned error can't find the container with id 24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.073141 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-qzrjp"] Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.081948 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v"] Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.883099 4876 generic.go:334] "Generic (PLEG): container finished" podID="b68576a6-b067-44ef-8322-a9c902ca5a86" containerID="8fd9be45165d412267a47276082eb7185c64df2b2cca136d546c892a348f129f" exitCode=0 Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.883182 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-3647-account-create-update-vw4kb" event={"ID":"b68576a6-b067-44ef-8322-a9c902ca5a86","Type":"ContainerDied","Data":"8fd9be45165d412267a47276082eb7185c64df2b2cca136d546c892a348f129f"} Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.883520 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/aodh-3647-account-create-update-vw4kb" event={"ID":"b68576a6-b067-44ef-8322-a9c902ca5a86","Type":"ContainerStarted","Data":"9c44e30e1b5df1aabd57b962485ec09e9e688a88158238c364817cc1a706184c"} Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.886082 4876 generic.go:334] "Generic (PLEG): container finished" podID="bbd6be73-ea8e-44e0-a951-970e42acf3cf" containerID="b57785ec56b8398e128a1255d1f03207676a8dc4a586fe66bfca9221cf751ed7" exitCode=0 Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.886167 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-qzrjp" event={"ID":"bbd6be73-ea8e-44e0-a951-970e42acf3cf","Type":"ContainerDied","Data":"b57785ec56b8398e128a1255d1f03207676a8dc4a586fe66bfca9221cf751ed7"} Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.886193 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-qzrjp" event={"ID":"bbd6be73-ea8e-44e0-a951-970e42acf3cf","Type":"ContainerStarted","Data":"7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1"} Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.888443 4876 generic.go:334] "Generic (PLEG): container finished" podID="547569de-dc02-4f05-b8a6-c85c2ae2bf7c" containerID="a5ddd3f9ed1bb2267755dccc145e4171483029c27d3539f4053c8b058acd6275" exitCode=0 Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.888486 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" event={"ID":"547569de-dc02-4f05-b8a6-c85c2ae2bf7c","Type":"ContainerDied","Data":"a5ddd3f9ed1bb2267755dccc145e4171483029c27d3539f4053c8b058acd6275"} Dec 15 09:00:01 crc kubenswrapper[4876]: I1215 09:00:01.888508 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" event={"ID":"547569de-dc02-4f05-b8a6-c85c2ae2bf7c","Type":"ContainerStarted","Data":"24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa"} Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.448345 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-qzrjp" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.456574 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.500042 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.548371 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df89h\" (UniqueName: \"kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h\") pod \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.548511 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume\") pod \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.548607 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts\") pod \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\" (UID: \"bbd6be73-ea8e-44e0-a951-970e42acf3cf\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.548693 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgs86\" (UniqueName: \"kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86\") pod \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.548773 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume\") pod \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\" (UID: \"547569de-dc02-4f05-b8a6-c85c2ae2bf7c\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.549239 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume" (OuterVolumeSpecName: "config-volume") pod "547569de-dc02-4f05-b8a6-c85c2ae2bf7c" (UID: "547569de-dc02-4f05-b8a6-c85c2ae2bf7c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.549451 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bbd6be73-ea8e-44e0-a951-970e42acf3cf" (UID: "bbd6be73-ea8e-44e0-a951-970e42acf3cf"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.550316 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.550652 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bbd6be73-ea8e-44e0-a951-970e42acf3cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.554330 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "547569de-dc02-4f05-b8a6-c85c2ae2bf7c" (UID: "547569de-dc02-4f05-b8a6-c85c2ae2bf7c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.554852 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86" (OuterVolumeSpecName: "kube-api-access-kgs86") pod "547569de-dc02-4f05-b8a6-c85c2ae2bf7c" (UID: "547569de-dc02-4f05-b8a6-c85c2ae2bf7c"). InnerVolumeSpecName "kube-api-access-kgs86". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.555870 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h" (OuterVolumeSpecName: "kube-api-access-df89h") pod "bbd6be73-ea8e-44e0-a951-970e42acf3cf" (UID: "bbd6be73-ea8e-44e0-a951-970e42acf3cf"). InnerVolumeSpecName "kube-api-access-df89h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652037 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts\") pod \"b68576a6-b067-44ef-8322-a9c902ca5a86\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652320 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5prrr\" (UniqueName: \"kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr\") pod \"b68576a6-b067-44ef-8322-a9c902ca5a86\" (UID: \"b68576a6-b067-44ef-8322-a9c902ca5a86\") " Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652500 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b68576a6-b067-44ef-8322-a9c902ca5a86" (UID: "b68576a6-b067-44ef-8322-a9c902ca5a86"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652752 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgs86\" (UniqueName: \"kubernetes.io/projected/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-kube-api-access-kgs86\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652771 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/547569de-dc02-4f05-b8a6-c85c2ae2bf7c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652784 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b68576a6-b067-44ef-8322-a9c902ca5a86-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.652796 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df89h\" (UniqueName: \"kubernetes.io/projected/bbd6be73-ea8e-44e0-a951-970e42acf3cf-kube-api-access-df89h\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.655032 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr" (OuterVolumeSpecName: "kube-api-access-5prrr") pod "b68576a6-b067-44ef-8322-a9c902ca5a86" (UID: "b68576a6-b067-44ef-8322-a9c902ca5a86"). InnerVolumeSpecName "kube-api-access-5prrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.754789 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5prrr\" (UniqueName: \"kubernetes.io/projected/b68576a6-b067-44ef-8322-a9c902ca5a86-kube-api-access-5prrr\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.912560 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-qzrjp" event={"ID":"bbd6be73-ea8e-44e0-a951-970e42acf3cf","Type":"ContainerDied","Data":"7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1"} Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.912597 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f4f3c2d07691a3e001a9dd5a974f4575fadbe9dc84b87fb80fbcd1643f163b1" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.912640 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-qzrjp" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.914452 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" event={"ID":"547569de-dc02-4f05-b8a6-c85c2ae2bf7c","Type":"ContainerDied","Data":"24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa"} Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.914472 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24b125813b81d1af53bc9c0f5455f87526a02bfdcc4b266de92d77efa6d932fa" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.914524 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.918525 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-3647-account-create-update-vw4kb" event={"ID":"b68576a6-b067-44ef-8322-a9c902ca5a86","Type":"ContainerDied","Data":"9c44e30e1b5df1aabd57b962485ec09e9e688a88158238c364817cc1a706184c"} Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.918573 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c44e30e1b5df1aabd57b962485ec09e9e688a88158238c364817cc1a706184c" Dec 15 09:00:03 crc kubenswrapper[4876]: I1215 09:00:03.918611 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-3647-account-create-update-vw4kb" Dec 15 09:00:04 crc kubenswrapper[4876]: I1215 09:00:04.568409 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg"] Dec 15 09:00:04 crc kubenswrapper[4876]: I1215 09:00:04.577337 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429775-5pnbg"] Dec 15 09:00:04 crc kubenswrapper[4876]: I1215 09:00:04.722750 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0341cdd-2d11-4530-8ba9-300fd9c900b6" path="/var/lib/kubelet/pods/e0341cdd-2d11-4530-8ba9-300fd9c900b6/volumes" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.010341 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-l2g6k"] Dec 15 09:00:05 crc kubenswrapper[4876]: E1215 09:00:05.011157 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68576a6-b067-44ef-8322-a9c902ca5a86" containerName="mariadb-account-create-update" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011181 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68576a6-b067-44ef-8322-a9c902ca5a86" containerName="mariadb-account-create-update" Dec 15 09:00:05 crc kubenswrapper[4876]: E1215 09:00:05.011199 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547569de-dc02-4f05-b8a6-c85c2ae2bf7c" containerName="collect-profiles" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011207 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="547569de-dc02-4f05-b8a6-c85c2ae2bf7c" containerName="collect-profiles" Dec 15 09:00:05 crc kubenswrapper[4876]: E1215 09:00:05.011230 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd6be73-ea8e-44e0-a951-970e42acf3cf" containerName="mariadb-database-create" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011237 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd6be73-ea8e-44e0-a951-970e42acf3cf" containerName="mariadb-database-create" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011480 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbd6be73-ea8e-44e0-a951-970e42acf3cf" containerName="mariadb-database-create" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011509 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="547569de-dc02-4f05-b8a6-c85c2ae2bf7c" containerName="collect-profiles" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.011525 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68576a6-b067-44ef-8322-a9c902ca5a86" containerName="mariadb-account-create-update" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 
09:00:05.012459 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.016223 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.016584 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.016934 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-hdgnx" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.016963 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.029072 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-l2g6k"] Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.186283 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.186378 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.186455 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.186769 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knmgp\" (UniqueName: \"kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.288214 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.288342 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.288370 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " 
pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.288467 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knmgp\" (UniqueName: \"kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.295062 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.299601 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.309456 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.310914 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knmgp\" (UniqueName: \"kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp\") pod \"aodh-db-sync-l2g6k\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.337143 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.871175 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-l2g6k"] Dec 15 09:00:05 crc kubenswrapper[4876]: W1215 09:00:05.879130 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd98f4901_6ef5_4f21_a8fc_6830229131c3.slice/crio-6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1 WatchSource:0}: Error finding container 6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1: Status 404 returned error can't find the container with id 6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1 Dec 15 09:00:05 crc kubenswrapper[4876]: I1215 09:00:05.938786 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-l2g6k" event={"ID":"d98f4901-6ef5-4f21-a8fc-6830229131c3","Type":"ContainerStarted","Data":"6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1"} Dec 15 09:00:09 crc kubenswrapper[4876]: I1215 09:00:09.989159 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-l2g6k" event={"ID":"d98f4901-6ef5-4f21-a8fc-6830229131c3","Type":"ContainerStarted","Data":"52720c96b580c90f6a945b4af3a957279e04a3c968e1d8b7b7a5b737f2bc29b9"} Dec 15 09:00:10 crc kubenswrapper[4876]: I1215 09:00:10.007082 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-l2g6k" podStartSLOduration=2.200366698 podStartE2EDuration="6.007058332s" podCreationTimestamp="2025-12-15 09:00:04 +0000 UTC" firstStartedPulling="2025-12-15 09:00:05.881688544 +0000 UTC m=+7731.452831455" lastFinishedPulling="2025-12-15 09:00:09.688380178 +0000 UTC m=+7735.259523089" observedRunningTime="2025-12-15 09:00:10.004764982 +0000 UTC m=+7735.575907893" watchObservedRunningTime="2025-12-15 09:00:10.007058332 +0000 UTC m=+7735.578201263" Dec 15 09:00:12 crc kubenswrapper[4876]: I1215 09:00:12.007983 4876 generic.go:334] "Generic (PLEG): container finished" podID="d98f4901-6ef5-4f21-a8fc-6830229131c3" containerID="52720c96b580c90f6a945b4af3a957279e04a3c968e1d8b7b7a5b737f2bc29b9" exitCode=0 Dec 15 09:00:12 crc kubenswrapper[4876]: I1215 09:00:12.008068 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-l2g6k" event={"ID":"d98f4901-6ef5-4f21-a8fc-6830229131c3","Type":"ContainerDied","Data":"52720c96b580c90f6a945b4af3a957279e04a3c968e1d8b7b7a5b737f2bc29b9"} Dec 15 09:00:12 crc kubenswrapper[4876]: I1215 09:00:12.412734 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.383159 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.455514 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle\") pod \"d98f4901-6ef5-4f21-a8fc-6830229131c3\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.455587 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data\") pod \"d98f4901-6ef5-4f21-a8fc-6830229131c3\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.455615 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knmgp\" (UniqueName: \"kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp\") pod \"d98f4901-6ef5-4f21-a8fc-6830229131c3\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.455816 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts\") pod \"d98f4901-6ef5-4f21-a8fc-6830229131c3\" (UID: \"d98f4901-6ef5-4f21-a8fc-6830229131c3\") " Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.461402 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts" (OuterVolumeSpecName: "scripts") pod "d98f4901-6ef5-4f21-a8fc-6830229131c3" (UID: "d98f4901-6ef5-4f21-a8fc-6830229131c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.462387 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp" (OuterVolumeSpecName: "kube-api-access-knmgp") pod "d98f4901-6ef5-4f21-a8fc-6830229131c3" (UID: "d98f4901-6ef5-4f21-a8fc-6830229131c3"). InnerVolumeSpecName "kube-api-access-knmgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.481818 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d98f4901-6ef5-4f21-a8fc-6830229131c3" (UID: "d98f4901-6ef5-4f21-a8fc-6830229131c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.484892 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data" (OuterVolumeSpecName: "config-data") pod "d98f4901-6ef5-4f21-a8fc-6830229131c3" (UID: "d98f4901-6ef5-4f21-a8fc-6830229131c3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.558370 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.558394 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.558405 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knmgp\" (UniqueName: \"kubernetes.io/projected/d98f4901-6ef5-4f21-a8fc-6830229131c3-kube-api-access-knmgp\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:13 crc kubenswrapper[4876]: I1215 09:00:13.558415 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d98f4901-6ef5-4f21-a8fc-6830229131c3-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.027246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-l2g6k" event={"ID":"d98f4901-6ef5-4f21-a8fc-6830229131c3","Type":"ContainerDied","Data":"6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1"} Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.027282 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6545b0e587912f1d24fa4bda882265f09678aa5e9e24bf45a1e1ba17c5f2f9d1" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.027352 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-l2g6k" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.671534 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 15 09:00:14 crc kubenswrapper[4876]: E1215 09:00:14.672467 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d98f4901-6ef5-4f21-a8fc-6830229131c3" containerName="aodh-db-sync" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.672487 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="d98f4901-6ef5-4f21-a8fc-6830229131c3" containerName="aodh-db-sync" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.672762 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="d98f4901-6ef5-4f21-a8fc-6830229131c3" containerName="aodh-db-sync" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.675810 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.678844 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.679134 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-hdgnx" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.680626 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.772162 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.790038 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-combined-ca-bundle\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.790258 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-config-data\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.790303 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdfjf\" (UniqueName: \"kubernetes.io/projected/dcd2c908-a394-4285-a770-7d933f4491f2-kube-api-access-hdfjf\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.790398 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-scripts\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.893089 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-combined-ca-bundle\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.893333 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-config-data\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.893386 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdfjf\" (UniqueName: \"kubernetes.io/projected/dcd2c908-a394-4285-a770-7d933f4491f2-kube-api-access-hdfjf\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.893467 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-scripts\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: 
I1215 09:00:14.895648 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.895955 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.899474 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-combined-ca-bundle\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.907777 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-config-data\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.910090 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dcd2c908-a394-4285-a770-7d933f4491f2-scripts\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:14 crc kubenswrapper[4876]: I1215 09:00:14.912611 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdfjf\" (UniqueName: \"kubernetes.io/projected/dcd2c908-a394-4285-a770-7d933f4491f2-kube-api-access-hdfjf\") pod \"aodh-0\" (UID: \"dcd2c908-a394-4285-a770-7d933f4491f2\") " pod="openstack/aodh-0" Dec 15 09:00:15 crc kubenswrapper[4876]: I1215 09:00:15.058056 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-hdgnx" Dec 15 09:00:15 crc kubenswrapper[4876]: I1215 09:00:15.066376 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 15 09:00:15 crc kubenswrapper[4876]: I1215 09:00:15.641457 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.044164 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"dcd2c908-a394-4285-a770-7d933f4491f2","Type":"ContainerStarted","Data":"9b797275753fafe6ca31205aa6ef33adc80aa4b8fc5ddd6edf49c0edc73efa81"} Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.989945 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.990530 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-central-agent" containerID="cri-o://6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1" gracePeriod=30 Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.990603 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="proxy-httpd" containerID="cri-o://d28b2b05906d85e75013210282360d4c91e775a51c541a1c69caf8f27df19617" gracePeriod=30 Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.990643 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="sg-core" containerID="cri-o://62965a326bc37bb46dc030676c87e17319ce6dcfc9f650ce0da76b5b8a236d6e" gracePeriod=30 Dec 15 09:00:16 crc kubenswrapper[4876]: I1215 09:00:16.990680 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-notification-agent" containerID="cri-o://68d7c1791a7704e8d8aa9a451458e780a13836d03dca16ab65e310c344ced490" gracePeriod=30 Dec 15 09:00:17 crc kubenswrapper[4876]: I1215 09:00:17.055118 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"dcd2c908-a394-4285-a770-7d933f4491f2","Type":"ContainerStarted","Data":"eccd8056a6ae07d42b7dd12971db51d0f457c0a284ee2a3a89c3e9868741983f"} Dec 15 09:00:17 crc kubenswrapper[4876]: E1215 09:00:17.541929 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70e73b46_01ac_4f61_9927_f90698e68866.slice/crio-6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70e73b46_01ac_4f61_9927_f90698e68866.slice/crio-conmon-6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1.scope\": RecentStats: unable to find data in memory cache]" Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.073010 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e73b46-01ac-4f61-9927-f90698e68866" containerID="d28b2b05906d85e75013210282360d4c91e775a51c541a1c69caf8f27df19617" exitCode=0 Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.073050 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e73b46-01ac-4f61-9927-f90698e68866" containerID="62965a326bc37bb46dc030676c87e17319ce6dcfc9f650ce0da76b5b8a236d6e" exitCode=2 Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 
09:00:18.073061 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e73b46-01ac-4f61-9927-f90698e68866" containerID="6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1" exitCode=0 Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.073091 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerDied","Data":"d28b2b05906d85e75013210282360d4c91e775a51c541a1c69caf8f27df19617"} Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.073153 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerDied","Data":"62965a326bc37bb46dc030676c87e17319ce6dcfc9f650ce0da76b5b8a236d6e"} Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.073170 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerDied","Data":"6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1"} Dec 15 09:00:18 crc kubenswrapper[4876]: I1215 09:00:18.075182 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"dcd2c908-a394-4285-a770-7d933f4491f2","Type":"ContainerStarted","Data":"f8c93d0677ccb9463bc767f4cb1b849e72837a672774abd1ae816710211d49c6"} Dec 15 09:00:19 crc kubenswrapper[4876]: I1215 09:00:19.109203 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"dcd2c908-a394-4285-a770-7d933f4491f2","Type":"ContainerStarted","Data":"fcbd81dc7135567e5bdc1db5064c740c956f8cebe98a9ce17c0ba29a288544b6"} Dec 15 09:00:20 crc kubenswrapper[4876]: I1215 09:00:20.048547 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-qd7sv"] Dec 15 09:00:20 crc kubenswrapper[4876]: I1215 09:00:20.060701 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-qd7sv"] Dec 15 09:00:20 crc kubenswrapper[4876]: I1215 09:00:20.718618 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27a2a74e-1254-40a9-bd85-a381999b2a8c" path="/var/lib/kubelet/pods/27a2a74e-1254-40a9-bd85-a381999b2a8c/volumes" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.132811 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"dcd2c908-a394-4285-a770-7d933f4491f2","Type":"ContainerStarted","Data":"47e2ace4d16266bac64a42c4cbf07d8d776de7d84b43e366c7bc073d8f7090e5"} Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.136860 4876 generic.go:334] "Generic (PLEG): container finished" podID="70e73b46-01ac-4f61-9927-f90698e68866" containerID="68d7c1791a7704e8d8aa9a451458e780a13836d03dca16ab65e310c344ced490" exitCode=0 Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.136900 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerDied","Data":"68d7c1791a7704e8d8aa9a451458e780a13836d03dca16ab65e310c344ced490"} Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.157934 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.489977376 podStartE2EDuration="7.157907909s" podCreationTimestamp="2025-12-15 09:00:14 +0000 UTC" firstStartedPulling="2025-12-15 09:00:15.645446107 +0000 UTC m=+7741.216589018" lastFinishedPulling="2025-12-15 09:00:20.31337648 +0000 UTC m=+7745.884519551" 
observedRunningTime="2025-12-15 09:00:21.154075309 +0000 UTC m=+7746.725218240" watchObservedRunningTime="2025-12-15 09:00:21.157907909 +0000 UTC m=+7746.729050830" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.414336 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.530883 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.530991 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.531052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.531180 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.531245 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqpqz\" (UniqueName: \"kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.531332 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.531369 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle\") pod \"70e73b46-01ac-4f61-9927-f90698e68866\" (UID: \"70e73b46-01ac-4f61-9927-f90698e68866\") " Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.532227 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.533029 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.539014 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz" (OuterVolumeSpecName: "kube-api-access-pqpqz") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "kube-api-access-pqpqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.546271 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts" (OuterVolumeSpecName: "scripts") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.583334 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.624323 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634291 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqpqz\" (UniqueName: \"kubernetes.io/projected/70e73b46-01ac-4f61-9927-f90698e68866-kube-api-access-pqpqz\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634360 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634376 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634418 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634433 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.634445 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70e73b46-01ac-4f61-9927-f90698e68866-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.675218 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data" (OuterVolumeSpecName: "config-data") pod "70e73b46-01ac-4f61-9927-f90698e68866" (UID: "70e73b46-01ac-4f61-9927-f90698e68866"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:21 crc kubenswrapper[4876]: I1215 09:00:21.741320 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70e73b46-01ac-4f61-9927-f90698e68866-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.147402 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"70e73b46-01ac-4f61-9927-f90698e68866","Type":"ContainerDied","Data":"3c2e9e384ef0f7e369dc51700b5e449d39b5aa31840cccacae597f6a17264cf6"} Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.147442 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.147478 4876 scope.go:117] "RemoveContainer" containerID="d28b2b05906d85e75013210282360d4c91e775a51c541a1c69caf8f27df19617" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.171413 4876 scope.go:117] "RemoveContainer" containerID="62965a326bc37bb46dc030676c87e17319ce6dcfc9f650ce0da76b5b8a236d6e" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.186179 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.195655 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.212314 4876 scope.go:117] "RemoveContainer" containerID="68d7c1791a7704e8d8aa9a451458e780a13836d03dca16ab65e310c344ced490" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.222434 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:22 crc kubenswrapper[4876]: E1215 09:00:22.222910 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-central-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.222992 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-central-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: E1215 09:00:22.223034 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="proxy-httpd" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223043 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="proxy-httpd" Dec 15 09:00:22 crc kubenswrapper[4876]: E1215 09:00:22.223062 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-notification-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223070 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-notification-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: E1215 09:00:22.223092 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="sg-core" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223103 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="sg-core" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223330 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-central-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223349 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="sg-core" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223385 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="ceilometer-notification-agent" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.223399 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="70e73b46-01ac-4f61-9927-f90698e68866" containerName="proxy-httpd" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.225305 4876 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.227494 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.227973 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.243081 4876 scope.go:117] "RemoveContainer" containerID="6a7d6d58bd0e2798d17c3dc4fc6b4d4aa5c03cf7126e608cd194c7a04f9e8bb1" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.248786 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.353961 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354000 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354077 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354124 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4b47\" (UniqueName: \"kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354166 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354210 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.354232 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456219 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456303 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4b47\" (UniqueName: \"kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456418 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456534 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456706 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.456741 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.457304 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.457534 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.464028 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.464560 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.465136 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.466705 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.473716 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4b47\" (UniqueName: \"kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47\") pod \"ceilometer-0\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.560631 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:00:22 crc kubenswrapper[4876]: I1215 09:00:22.728535 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70e73b46-01ac-4f61-9927-f90698e68866" path="/var/lib/kubelet/pods/70e73b46-01ac-4f61-9927-f90698e68866/volumes" Dec 15 09:00:23 crc kubenswrapper[4876]: I1215 09:00:23.141513 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:23 crc kubenswrapper[4876]: I1215 09:00:23.160570 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerStarted","Data":"4b8b859b77dc11d8fa4b81d9d30b8837e0f85063c27dffd2a98697c04f232907"} Dec 15 09:00:24 crc kubenswrapper[4876]: I1215 09:00:24.173122 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerStarted","Data":"1f83666543781c1ff70a28c0ddb3a10976cf83c6c49c7dc46f323def2410889a"} Dec 15 09:00:24 crc kubenswrapper[4876]: I1215 09:00:24.173476 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerStarted","Data":"81be2cbd8493d396ade0f9bee2eefdcdcdd6ba7d42912332a33c75f4da674739"} Dec 15 09:00:25 crc kubenswrapper[4876]: I1215 09:00:25.183773 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerStarted","Data":"57af1a8ed7437691ebb67946f3183d8caa6235cbc5fcb53599cbf60edd4eaf6e"} Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.507570 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-n9md8"] Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.509097 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.530864 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-n9md8"] Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.580512 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-683e-account-create-update-tw7lf"] Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.581765 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.583612 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.592556 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-683e-account-create-update-tw7lf"] Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.636549 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g8fc\" (UniqueName: \"kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.636713 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.738276 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g8fc\" (UniqueName: \"kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.738647 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8srk\" (UniqueName: \"kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.738736 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.738785 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.739498 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.768093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g8fc\" (UniqueName: \"kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc\") pod \"manila-db-create-n9md8\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.828588 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-n9md8" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.842552 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8srk\" (UniqueName: \"kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.843768 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.844440 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.866749 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8srk\" (UniqueName: \"kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk\") pod \"manila-683e-account-create-update-tw7lf\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:26 crc kubenswrapper[4876]: I1215 09:00:26.896682 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.229645 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerStarted","Data":"a6441b704cb64ee0ddf0e12aabd72a9f0dd99d3c32d711354a71087abf11fb72"} Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.230135 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.325849 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.325908 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.325960 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.328063 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.328157 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" gracePeriod=600 Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.451543 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.51294298 podStartE2EDuration="5.451518454s" podCreationTimestamp="2025-12-15 09:00:22 +0000 UTC" firstStartedPulling="2025-12-15 09:00:23.144343103 +0000 UTC m=+7748.715486014" lastFinishedPulling="2025-12-15 09:00:26.082918577 +0000 UTC m=+7751.654061488" observedRunningTime="2025-12-15 09:00:27.250701451 +0000 UTC m=+7752.821844372" watchObservedRunningTime="2025-12-15 09:00:27.451518454 +0000 UTC m=+7753.022661365" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.457450 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-n9md8"] Dec 15 09:00:27 crc kubenswrapper[4876]: E1215 09:00:27.476280 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:00:27 crc kubenswrapper[4876]: I1215 09:00:27.616076 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-683e-account-create-update-tw7lf"] Dec 15 09:00:27 crc kubenswrapper[4876]: W1215 09:00:27.617973 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28b4882c_bb24_406f_bf0f_9d788a13c40f.slice/crio-65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59 WatchSource:0}: Error finding container 65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59: Status 404 returned error can't find the container with id 65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59 Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.242526 4876 generic.go:334] "Generic (PLEG): container finished" podID="91c93649-bf5e-4e88-8f80-5cb4f62d2b19" containerID="7070b3d3f2182ae72fbee70905b42763a2386161e4f93216e83ada18fb1fb34e" exitCode=0 Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.242786 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-n9md8" event={"ID":"91c93649-bf5e-4e88-8f80-5cb4f62d2b19","Type":"ContainerDied","Data":"7070b3d3f2182ae72fbee70905b42763a2386161e4f93216e83ada18fb1fb34e"} Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.242947 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-n9md8" event={"ID":"91c93649-bf5e-4e88-8f80-5cb4f62d2b19","Type":"ContainerStarted","Data":"fe2059750966c4990d25d0cd2abab469379c29b1bea1b8cb6cad221650f248f5"} Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.245678 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" exitCode=0 Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.245742 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3"} Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.245774 4876 scope.go:117] "RemoveContainer" containerID="29d85bfa3ef5674dabcb7af04441e135ceccc897cf5ebfb55f5a95b2fb0e0285" Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.246626 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:00:28 crc kubenswrapper[4876]: E1215 09:00:28.246922 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.250158 4876 generic.go:334] "Generic (PLEG): container finished" podID="28b4882c-bb24-406f-bf0f-9d788a13c40f" containerID="03bd51b219b12a040b629a8cc59c12ee340153e8e2170b15b6b1f1a043ea937e" exitCode=0 Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.251342 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-683e-account-create-update-tw7lf" 
event={"ID":"28b4882c-bb24-406f-bf0f-9d788a13c40f","Type":"ContainerDied","Data":"03bd51b219b12a040b629a8cc59c12ee340153e8e2170b15b6b1f1a043ea937e"} Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.251376 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-683e-account-create-update-tw7lf" event={"ID":"28b4882c-bb24-406f-bf0f-9d788a13c40f","Type":"ContainerStarted","Data":"65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59"} Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.893091 4876 scope.go:117] "RemoveContainer" containerID="1373d38b5c2b523d86836d0af2a2fc4b453a51e1d6915834ddd83a4b61fc6746" Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.927803 4876 scope.go:117] "RemoveContainer" containerID="349ee55e5f079000e47bcfce05e88385252e256308f374f6d22eff79cc789ae6" Dec 15 09:00:28 crc kubenswrapper[4876]: I1215 09:00:28.983904 4876 scope.go:117] "RemoveContainer" containerID="f2c98e708fccc39766fbff57d8e95ae990cec521ed3554ee35fc6d4ac95612e4" Dec 15 09:00:29 crc kubenswrapper[4876]: I1215 09:00:29.071840 4876 scope.go:117] "RemoveContainer" containerID="fb4cbc4feb22b9cfedbf8e695e30e35e84746172a048ac6a5fb3eef7548a4c90" Dec 15 09:00:29 crc kubenswrapper[4876]: I1215 09:00:29.840835 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:29 crc kubenswrapper[4876]: I1215 09:00:29.848430 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-n9md8" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.018435 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts\") pod \"28b4882c-bb24-406f-bf0f-9d788a13c40f\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.018492 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9g8fc\" (UniqueName: \"kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc\") pod \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.018587 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8srk\" (UniqueName: \"kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk\") pod \"28b4882c-bb24-406f-bf0f-9d788a13c40f\" (UID: \"28b4882c-bb24-406f-bf0f-9d788a13c40f\") " Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.018606 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts\") pod \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\" (UID: \"91c93649-bf5e-4e88-8f80-5cb4f62d2b19\") " Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.019515 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91c93649-bf5e-4e88-8f80-5cb4f62d2b19" (UID: "91c93649-bf5e-4e88-8f80-5cb4f62d2b19"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.019907 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "28b4882c-bb24-406f-bf0f-9d788a13c40f" (UID: "28b4882c-bb24-406f-bf0f-9d788a13c40f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.027399 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk" (OuterVolumeSpecName: "kube-api-access-h8srk") pod "28b4882c-bb24-406f-bf0f-9d788a13c40f" (UID: "28b4882c-bb24-406f-bf0f-9d788a13c40f"). InnerVolumeSpecName "kube-api-access-h8srk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.027748 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc" (OuterVolumeSpecName: "kube-api-access-9g8fc") pod "91c93649-bf5e-4e88-8f80-5cb4f62d2b19" (UID: "91c93649-bf5e-4e88-8f80-5cb4f62d2b19"). InnerVolumeSpecName "kube-api-access-9g8fc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.131319 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28b4882c-bb24-406f-bf0f-9d788a13c40f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.131581 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9g8fc\" (UniqueName: \"kubernetes.io/projected/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-kube-api-access-9g8fc\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.131672 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8srk\" (UniqueName: \"kubernetes.io/projected/28b4882c-bb24-406f-bf0f-9d788a13c40f-kube-api-access-h8srk\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.131784 4876 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91c93649-bf5e-4e88-8f80-5cb4f62d2b19-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.273716 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-n9md8" event={"ID":"91c93649-bf5e-4e88-8f80-5cb4f62d2b19","Type":"ContainerDied","Data":"fe2059750966c4990d25d0cd2abab469379c29b1bea1b8cb6cad221650f248f5"} Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.273756 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-n9md8" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.273780 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe2059750966c4990d25d0cd2abab469379c29b1bea1b8cb6cad221650f248f5" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.276300 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-683e-account-create-update-tw7lf" event={"ID":"28b4882c-bb24-406f-bf0f-9d788a13c40f","Type":"ContainerDied","Data":"65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59"} Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.276325 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65728ac8c8fc007d69a5d6873eacdfe360d23fb02c8894ba9bae2037b5005d59" Dec 15 09:00:30 crc kubenswrapper[4876]: I1215 09:00:30.276502 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-683e-account-create-update-tw7lf" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.935610 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-dqgdr"] Dec 15 09:00:31 crc kubenswrapper[4876]: E1215 09:00:31.936346 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28b4882c-bb24-406f-bf0f-9d788a13c40f" containerName="mariadb-account-create-update" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.936364 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="28b4882c-bb24-406f-bf0f-9d788a13c40f" containerName="mariadb-account-create-update" Dec 15 09:00:31 crc kubenswrapper[4876]: E1215 09:00:31.936379 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c93649-bf5e-4e88-8f80-5cb4f62d2b19" containerName="mariadb-database-create" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.936387 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c93649-bf5e-4e88-8f80-5cb4f62d2b19" containerName="mariadb-database-create" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.936621 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c93649-bf5e-4e88-8f80-5cb4f62d2b19" containerName="mariadb-database-create" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.936642 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="28b4882c-bb24-406f-bf0f-9d788a13c40f" containerName="mariadb-account-create-update" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.937516 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.940068 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.940895 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-bwwvs" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.955356 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-dqgdr"] Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.970065 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.970165 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.970219 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:31 crc kubenswrapper[4876]: I1215 09:00:31.970246 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdhlq\" (UniqueName: \"kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.071605 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.071672 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.071699 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdhlq\" (UniqueName: \"kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.071809 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle\") pod \"manila-db-sync-dqgdr\" (UID: 
\"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.079235 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.079394 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.086336 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdhlq\" (UniqueName: \"kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.088467 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data\") pod \"manila-db-sync-dqgdr\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:32 crc kubenswrapper[4876]: I1215 09:00:32.260727 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:33 crc kubenswrapper[4876]: I1215 09:00:33.076541 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-dqgdr"] Dec 15 09:00:33 crc kubenswrapper[4876]: W1215 09:00:33.083069 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d977fdb_c84c_4ec6_aae2_f97f005520ca.slice/crio-84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827 WatchSource:0}: Error finding container 84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827: Status 404 returned error can't find the container with id 84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827 Dec 15 09:00:33 crc kubenswrapper[4876]: I1215 09:00:33.305340 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-dqgdr" event={"ID":"9d977fdb-c84c-4ec6-aae2-f97f005520ca","Type":"ContainerStarted","Data":"84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827"} Dec 15 09:00:39 crc kubenswrapper[4876]: I1215 09:00:39.368912 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-dqgdr" event={"ID":"9d977fdb-c84c-4ec6-aae2-f97f005520ca","Type":"ContainerStarted","Data":"a60c9251fa3647ef7cefc1c85ce1c9cb6dc1d33c0bb8ed4e3363a1f6ffebe9fd"} Dec 15 09:00:39 crc kubenswrapper[4876]: I1215 09:00:39.386367 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-dqgdr" podStartSLOduration=3.132984903 podStartE2EDuration="8.386350449s" podCreationTimestamp="2025-12-15 09:00:31 +0000 UTC" firstStartedPulling="2025-12-15 09:00:33.085073883 +0000 UTC m=+7758.656216794" lastFinishedPulling="2025-12-15 09:00:38.338439439 +0000 UTC m=+7763.909582340" observedRunningTime="2025-12-15 09:00:39.383475324 +0000 UTC 
m=+7764.954618275" watchObservedRunningTime="2025-12-15 09:00:39.386350449 +0000 UTC m=+7764.957493360" Dec 15 09:00:41 crc kubenswrapper[4876]: I1215 09:00:41.390322 4876 generic.go:334] "Generic (PLEG): container finished" podID="9d977fdb-c84c-4ec6-aae2-f97f005520ca" containerID="a60c9251fa3647ef7cefc1c85ce1c9cb6dc1d33c0bb8ed4e3363a1f6ffebe9fd" exitCode=0 Dec 15 09:00:41 crc kubenswrapper[4876]: I1215 09:00:41.390407 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-dqgdr" event={"ID":"9d977fdb-c84c-4ec6-aae2-f97f005520ca","Type":"ContainerDied","Data":"a60c9251fa3647ef7cefc1c85ce1c9cb6dc1d33c0bb8ed4e3363a1f6ffebe9fd"} Dec 15 09:00:41 crc kubenswrapper[4876]: I1215 09:00:41.706308 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:00:41 crc kubenswrapper[4876]: E1215 09:00:41.706850 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:00:42 crc kubenswrapper[4876]: I1215 09:00:42.900285 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.006181 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data\") pod \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.006621 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdhlq\" (UniqueName: \"kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq\") pod \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.006664 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle\") pod \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.007134 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data\") pod \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\" (UID: \"9d977fdb-c84c-4ec6-aae2-f97f005520ca\") " Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.012363 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "9d977fdb-c84c-4ec6-aae2-f97f005520ca" (UID: "9d977fdb-c84c-4ec6-aae2-f97f005520ca"). InnerVolumeSpecName "job-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.012570 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq" (OuterVolumeSpecName: "kube-api-access-sdhlq") pod "9d977fdb-c84c-4ec6-aae2-f97f005520ca" (UID: "9d977fdb-c84c-4ec6-aae2-f97f005520ca"). InnerVolumeSpecName "kube-api-access-sdhlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.014672 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data" (OuterVolumeSpecName: "config-data") pod "9d977fdb-c84c-4ec6-aae2-f97f005520ca" (UID: "9d977fdb-c84c-4ec6-aae2-f97f005520ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.040413 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d977fdb-c84c-4ec6-aae2-f97f005520ca" (UID: "9d977fdb-c84c-4ec6-aae2-f97f005520ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.109709 4876 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-job-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.110009 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.110020 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdhlq\" (UniqueName: \"kubernetes.io/projected/9d977fdb-c84c-4ec6-aae2-f97f005520ca-kube-api-access-sdhlq\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.110030 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d977fdb-c84c-4ec6-aae2-f97f005520ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.411078 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-dqgdr" event={"ID":"9d977fdb-c84c-4ec6-aae2-f97f005520ca","Type":"ContainerDied","Data":"84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827"} Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.411135 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84ca1ae429f054ec8d2e52096ea5a0723b571e5ea85e3de3650e9fec2e45a827" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.411168 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-dqgdr" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.652209 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Dec 15 09:00:43 crc kubenswrapper[4876]: E1215 09:00:43.652718 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d977fdb-c84c-4ec6-aae2-f97f005520ca" containerName="manila-db-sync" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.652741 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d977fdb-c84c-4ec6-aae2-f97f005520ca" containerName="manila-db-sync" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.652972 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d977fdb-c84c-4ec6-aae2-f97f005520ca" containerName="manila-db-sync" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.654013 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.656731 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-bwwvs" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.657245 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.657626 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.657938 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.677823 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.747366 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.749083 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.753966 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.783174 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.822968 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.823032 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48244660-aaad-48db-ba7a-ae76adc56672-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.823217 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-scripts\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.823257 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.823357 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlvpd\" (UniqueName: \"kubernetes.io/projected/48244660-aaad-48db-ba7a-ae76adc56672-kube-api-access-mlvpd\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.823708 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.874737 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.876871 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.886457 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925564 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925627 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlvpd\" (UniqueName: \"kubernetes.io/projected/48244660-aaad-48db-ba7a-ae76adc56672-kube-api-access-mlvpd\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925709 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925755 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925825 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925868 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-scripts\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925891 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925926 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc8kx\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-kube-api-access-jc8kx\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.925966 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.926004 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48244660-aaad-48db-ba7a-ae76adc56672-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.926035 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-ceph\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.926061 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.926117 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-scripts\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.926142 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.927470 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/48244660-aaad-48db-ba7a-ae76adc56672-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.940974 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.945880 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-scripts\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.946715 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.957358 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48244660-aaad-48db-ba7a-ae76adc56672-config-data\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.965781 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlvpd\" (UniqueName: \"kubernetes.io/projected/48244660-aaad-48db-ba7a-ae76adc56672-kube-api-access-mlvpd\") pod \"manila-scheduler-0\" (UID: \"48244660-aaad-48db-ba7a-ae76adc56672\") " pod="openstack/manila-scheduler-0" Dec 15 09:00:43 crc kubenswrapper[4876]: I1215 09:00:43.985464 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034272 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034342 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-ceph\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034371 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034422 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034458 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlvqx\" (UniqueName: \"kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034486 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034563 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " 
pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034596 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034629 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-scripts\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034724 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034758 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.034796 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc8kx\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-kube-api-access-jc8kx\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.045814 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-ceph\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.048933 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.049046 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.065394 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" 
(UniqueName: \"kubernetes.io/host-path/9db8fee6-4ebb-40cf-a695-03c92d3936d6-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.065905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.079739 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-scripts\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.092258 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db8fee6-4ebb-40cf-a695-03c92d3936d6-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.092681 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc8kx\" (UniqueName: \"kubernetes.io/projected/9db8fee6-4ebb-40cf-a695-03c92d3936d6-kube-api-access-jc8kx\") pod \"manila-share-share1-0\" (UID: \"9db8fee6-4ebb-40cf-a695-03c92d3936d6\") " pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.144767 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.144870 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.144923 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.144968 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.144994 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlvqx\" (UniqueName: \"kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") 
" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.147121 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.147712 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.149469 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.160566 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.214051 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlvqx\" (UniqueName: \"kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx\") pod \"dnsmasq-dns-7dfd569bdc-96wwg\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.273070 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.274890 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.281527 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.310398 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.378527 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.455709 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456143 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456193 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data-custom\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456213 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-scripts\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456271 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc3003f4-6959-4f1e-8c14-9a17f56e5112-logs\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456295 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cc3003f4-6959-4f1e-8c14-9a17f56e5112-etc-machine-id\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.456323 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmdsv\" (UniqueName: \"kubernetes.io/projected/cc3003f4-6959-4f1e-8c14-9a17f56e5112-kube-api-access-dmdsv\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.497658 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559295 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc3003f4-6959-4f1e-8c14-9a17f56e5112-logs\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559386 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cc3003f4-6959-4f1e-8c14-9a17f56e5112-etc-machine-id\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmdsv\" (UniqueName: \"kubernetes.io/projected/cc3003f4-6959-4f1e-8c14-9a17f56e5112-kube-api-access-dmdsv\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559551 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559753 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data-custom\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.559782 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-scripts\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.561006 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cc3003f4-6959-4f1e-8c14-9a17f56e5112-etc-machine-id\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.561458 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc3003f4-6959-4f1e-8c14-9a17f56e5112-logs\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.568005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-scripts\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " 
pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.568801 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.569736 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-config-data-custom\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.569777 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc3003f4-6959-4f1e-8c14-9a17f56e5112-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.583706 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmdsv\" (UniqueName: \"kubernetes.io/projected/cc3003f4-6959-4f1e-8c14-9a17f56e5112-kube-api-access-dmdsv\") pod \"manila-api-0\" (UID: \"cc3003f4-6959-4f1e-8c14-9a17f56e5112\") " pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.625616 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Dec 15 09:00:44 crc kubenswrapper[4876]: I1215 09:00:44.849341 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.106742 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.221411 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.393657 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.451026 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" event={"ID":"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f","Type":"ContainerStarted","Data":"b5a21428a540cf909b0b17d761d4d045cdb410eb06b0ee014740354c40337472"} Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.454840 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"48244660-aaad-48db-ba7a-ae76adc56672","Type":"ContainerStarted","Data":"3efd57fe2f636841cddfedc3296c600a1fc8b378809eb83238f8fd062d717a85"} Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.462330 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9db8fee6-4ebb-40cf-a695-03c92d3936d6","Type":"ContainerStarted","Data":"6a4682546115496da6a5ccf6bb59c780d3a8cb5982ee262dfa4cc504d3709361"} Dec 15 09:00:45 crc kubenswrapper[4876]: I1215 09:00:45.466258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"cc3003f4-6959-4f1e-8c14-9a17f56e5112","Type":"ContainerStarted","Data":"2aaf2fcebd7fbe01b9ea0aa6d0895165d6cb999a5d3423765aee9b91182de7de"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.484887 4876 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"48244660-aaad-48db-ba7a-ae76adc56672","Type":"ContainerStarted","Data":"2a08946f1168aa0a8aed4eb4d08bc020f1b84dd2d26429a610c9223288ce33f1"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.485329 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"48244660-aaad-48db-ba7a-ae76adc56672","Type":"ContainerStarted","Data":"5970a7d287ac0b3b41dfc90ccf6ec27c0a85d823d2c137f245e026328f236842"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.492687 4876 generic.go:334] "Generic (PLEG): container finished" podID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerID="c7eda1d36623b011abd56d9129e2fcb7b465bf74716383aba24096d933e456e8" exitCode=0 Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.492815 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" event={"ID":"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f","Type":"ContainerDied","Data":"c7eda1d36623b011abd56d9129e2fcb7b465bf74716383aba24096d933e456e8"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.499094 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"cc3003f4-6959-4f1e-8c14-9a17f56e5112","Type":"ContainerStarted","Data":"544cd6435c598241c1259a2345275c72118e1e0655bd82742781bb3db5e6c529"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.499148 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"cc3003f4-6959-4f1e-8c14-9a17f56e5112","Type":"ContainerStarted","Data":"80076460451faeecee8057847a2ae208a35227e4a5ead3301fcd453235af80f4"} Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.499822 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.516347 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.16021027 podStartE2EDuration="3.516330649s" podCreationTimestamp="2025-12-15 09:00:43 +0000 UTC" firstStartedPulling="2025-12-15 09:00:44.840414416 +0000 UTC m=+7770.411557327" lastFinishedPulling="2025-12-15 09:00:45.196534795 +0000 UTC m=+7770.767677706" observedRunningTime="2025-12-15 09:00:46.512512999 +0000 UTC m=+7772.083655920" watchObservedRunningTime="2025-12-15 09:00:46.516330649 +0000 UTC m=+7772.087473560" Dec 15 09:00:46 crc kubenswrapper[4876]: I1215 09:00:46.576615 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.576591325 podStartE2EDuration="2.576591325s" podCreationTimestamp="2025-12-15 09:00:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:00:46.534329953 +0000 UTC m=+7772.105472884" watchObservedRunningTime="2025-12-15 09:00:46.576591325 +0000 UTC m=+7772.147734246" Dec 15 09:00:47 crc kubenswrapper[4876]: I1215 09:00:47.513730 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" event={"ID":"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f","Type":"ContainerStarted","Data":"9960240d29fea5fcc4852dc11304fa595e307ea10b7fa513308896cb67b05db0"} Dec 15 09:00:47 crc kubenswrapper[4876]: I1215 09:00:47.535862 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" podStartSLOduration=4.535842133 
podStartE2EDuration="4.535842133s" podCreationTimestamp="2025-12-15 09:00:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:00:47.53005388 +0000 UTC m=+7773.101196791" watchObservedRunningTime="2025-12-15 09:00:47.535842133 +0000 UTC m=+7773.106985044" Dec 15 09:00:48 crc kubenswrapper[4876]: I1215 09:00:48.536689 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:50 crc kubenswrapper[4876]: I1215 09:00:50.056008 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-k9fqd"] Dec 15 09:00:50 crc kubenswrapper[4876]: I1215 09:00:50.066790 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-k9fqd"] Dec 15 09:00:50 crc kubenswrapper[4876]: I1215 09:00:50.718875 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0c31a53-ff2a-47f9-9ce2-5083836b9871" path="/var/lib/kubelet/pods/a0c31a53-ff2a-47f9-9ce2-5083836b9871/volumes" Dec 15 09:00:51 crc kubenswrapper[4876]: I1215 09:00:51.029353 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ef5a-account-create-update-rqdkt"] Dec 15 09:00:51 crc kubenswrapper[4876]: I1215 09:00:51.040517 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-ef5a-account-create-update-rqdkt"] Dec 15 09:00:52 crc kubenswrapper[4876]: I1215 09:00:52.585140 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 15 09:00:52 crc kubenswrapper[4876]: I1215 09:00:52.707481 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:00:52 crc kubenswrapper[4876]: E1215 09:00:52.708119 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:00:52 crc kubenswrapper[4876]: I1215 09:00:52.746510 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d62244f-373f-4a19-a482-c0d99bf9970c" path="/var/lib/kubelet/pods/8d62244f-373f-4a19-a482-c0d99bf9970c/volumes" Dec 15 09:00:53 crc kubenswrapper[4876]: I1215 09:00:53.988888 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.499321 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.574901 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.578433 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="dnsmasq-dns" containerID="cri-o://c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128" gracePeriod=10 Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.630533 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/manila-share-share1-0" event={"ID":"9db8fee6-4ebb-40cf-a695-03c92d3936d6","Type":"ContainerStarted","Data":"3e16997dcbe63f00f01399db2d2d74bd5d60f992fc751f3b3e9e7fb83a975106"} Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.630570 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"9db8fee6-4ebb-40cf-a695-03c92d3936d6","Type":"ContainerStarted","Data":"a47fe518ca1cceca2ab22e08697d338b44f9acb2a5fc48697eb00d7a74d1b5e2"} Dec 15 09:00:54 crc kubenswrapper[4876]: I1215 09:00:54.663849 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.5758716059999998 podStartE2EDuration="11.663828049s" podCreationTimestamp="2025-12-15 09:00:43 +0000 UTC" firstStartedPulling="2025-12-15 09:00:45.123776601 +0000 UTC m=+7770.694919512" lastFinishedPulling="2025-12-15 09:00:53.211733014 +0000 UTC m=+7778.782875955" observedRunningTime="2025-12-15 09:00:54.652965023 +0000 UTC m=+7780.224107964" watchObservedRunningTime="2025-12-15 09:00:54.663828049 +0000 UTC m=+7780.234970960" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.133015 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.214279 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc\") pod \"07694812-cd06-4758-9694-5c522ed85202\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.214327 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config\") pod \"07694812-cd06-4758-9694-5c522ed85202\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.214396 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb\") pod \"07694812-cd06-4758-9694-5c522ed85202\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.214535 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6ff6\" (UniqueName: \"kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6\") pod \"07694812-cd06-4758-9694-5c522ed85202\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.214570 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb\") pod \"07694812-cd06-4758-9694-5c522ed85202\" (UID: \"07694812-cd06-4758-9694-5c522ed85202\") " Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.249886 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6" (OuterVolumeSpecName: "kube-api-access-x6ff6") pod "07694812-cd06-4758-9694-5c522ed85202" (UID: "07694812-cd06-4758-9694-5c522ed85202"). InnerVolumeSpecName "kube-api-access-x6ff6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.288730 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "07694812-cd06-4758-9694-5c522ed85202" (UID: "07694812-cd06-4758-9694-5c522ed85202"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.295500 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config" (OuterVolumeSpecName: "config") pod "07694812-cd06-4758-9694-5c522ed85202" (UID: "07694812-cd06-4758-9694-5c522ed85202"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.300782 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "07694812-cd06-4758-9694-5c522ed85202" (UID: "07694812-cd06-4758-9694-5c522ed85202"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.317468 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6ff6\" (UniqueName: \"kubernetes.io/projected/07694812-cd06-4758-9694-5c522ed85202-kube-api-access-x6ff6\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.317499 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.317508 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.317518 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.323814 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "07694812-cd06-4758-9694-5c522ed85202" (UID: "07694812-cd06-4758-9694-5c522ed85202"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.419867 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/07694812-cd06-4758-9694-5c522ed85202-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.642848 4876 generic.go:334] "Generic (PLEG): container finished" podID="07694812-cd06-4758-9694-5c522ed85202" containerID="c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128" exitCode=0 Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.642959 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.643034 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" event={"ID":"07694812-cd06-4758-9694-5c522ed85202","Type":"ContainerDied","Data":"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128"} Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.643071 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b7c74d49-6d99r" event={"ID":"07694812-cd06-4758-9694-5c522ed85202","Type":"ContainerDied","Data":"8a90761af304c701aa52179de12c8f34186df5a81a5e7d6c913ebc0ce705831b"} Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.643089 4876 scope.go:117] "RemoveContainer" containerID="c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.688725 4876 scope.go:117] "RemoveContainer" containerID="f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.712969 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.733153 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9b7c74d49-6d99r"] Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.764463 4876 scope.go:117] "RemoveContainer" containerID="c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128" Dec 15 09:00:55 crc kubenswrapper[4876]: E1215 09:00:55.764985 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128\": container with ID starting with c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128 not found: ID does not exist" containerID="c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.765019 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128"} err="failed to get container status \"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128\": rpc error: code = NotFound desc = could not find container \"c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128\": container with ID starting with c9870e3e8528f5b8e3dab533587981faaf9a2514322383fb1f8626ccd673e128 not found: ID does not exist" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.765041 4876 scope.go:117] "RemoveContainer" containerID="f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd" Dec 15 09:00:55 crc kubenswrapper[4876]: E1215 09:00:55.768197 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd\": container with ID starting with f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd not found: ID does not exist" containerID="f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd" Dec 15 09:00:55 crc kubenswrapper[4876]: I1215 09:00:55.768231 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd"} err="failed to get container status 
\"f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd\": rpc error: code = NotFound desc = could not find container \"f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd\": container with ID starting with f8a32ec76704f7d7f85bafe8f864cfbfa4973cab62e6a5d642cad59aa21b25dd not found: ID does not exist" Dec 15 09:00:56 crc kubenswrapper[4876]: I1215 09:00:56.748766 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07694812-cd06-4758-9694-5c522ed85202" path="/var/lib/kubelet/pods/07694812-cd06-4758-9694-5c522ed85202/volumes" Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.525343 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.525967 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-central-agent" containerID="cri-o://81be2cbd8493d396ade0f9bee2eefdcdcdd6ba7d42912332a33c75f4da674739" gracePeriod=30 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.526120 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-notification-agent" containerID="cri-o://1f83666543781c1ff70a28c0ddb3a10976cf83c6c49c7dc46f323def2410889a" gracePeriod=30 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.526099 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="sg-core" containerID="cri-o://57af1a8ed7437691ebb67946f3183d8caa6235cbc5fcb53599cbf60edd4eaf6e" gracePeriod=30 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.526172 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="proxy-httpd" containerID="cri-o://a6441b704cb64ee0ddf0e12aabd72a9f0dd99d3c32d711354a71087abf11fb72" gracePeriod=30 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.667940 4876 generic.go:334] "Generic (PLEG): container finished" podID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerID="a6441b704cb64ee0ddf0e12aabd72a9f0dd99d3c32d711354a71087abf11fb72" exitCode=0 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.667979 4876 generic.go:334] "Generic (PLEG): container finished" podID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerID="57af1a8ed7437691ebb67946f3183d8caa6235cbc5fcb53599cbf60edd4eaf6e" exitCode=2 Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.668005 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerDied","Data":"a6441b704cb64ee0ddf0e12aabd72a9f0dd99d3c32d711354a71087abf11fb72"} Dec 15 09:00:57 crc kubenswrapper[4876]: I1215 09:00:57.668063 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerDied","Data":"57af1a8ed7437691ebb67946f3183d8caa6235cbc5fcb53599cbf60edd4eaf6e"} Dec 15 09:00:58 crc kubenswrapper[4876]: I1215 09:00:58.679711 4876 generic.go:334] "Generic (PLEG): container finished" podID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerID="81be2cbd8493d396ade0f9bee2eefdcdcdd6ba7d42912332a33c75f4da674739" exitCode=0 Dec 15 09:00:58 crc kubenswrapper[4876]: I1215 09:00:58.680055 4876 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerDied","Data":"81be2cbd8493d396ade0f9bee2eefdcdcdd6ba7d42912332a33c75f4da674739"} Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.034025 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-w88rl"] Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.049727 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-w88rl"] Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.150341 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29429821-l6qcz"] Dec 15 09:01:00 crc kubenswrapper[4876]: E1215 09:01:00.150820 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="init" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.150842 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="init" Dec 15 09:01:00 crc kubenswrapper[4876]: E1215 09:01:00.150878 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="dnsmasq-dns" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.150891 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="dnsmasq-dns" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.151275 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="07694812-cd06-4758-9694-5c522ed85202" containerName="dnsmasq-dns" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.152079 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.159984 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29429821-l6qcz"] Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.228752 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78tnf\" (UniqueName: \"kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.228944 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.228985 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.229011 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.331678 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78tnf\" (UniqueName: \"kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.331900 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.331991 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.332025 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.338312 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.338395 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.338920 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.352502 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78tnf\" (UniqueName: \"kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf\") pod \"keystone-cron-29429821-l6qcz\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.471379 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:00 crc kubenswrapper[4876]: I1215 09:01:00.716532 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0be3c8f4-50fe-4da6-8afb-4fa22a938ea6" path="/var/lib/kubelet/pods/0be3c8f4-50fe-4da6-8afb-4fa22a938ea6/volumes" Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.110210 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29429821-l6qcz"] Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.710922 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429821-l6qcz" event={"ID":"62fa6354-9ea9-4586-814b-6f4e9dd53da2","Type":"ContainerStarted","Data":"dcb835a4671ffbc38efec2a3bbc3a4b1c5be0cfc532f66e0a2a4d80ba154e292"} Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.711557 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429821-l6qcz" event={"ID":"62fa6354-9ea9-4586-814b-6f4e9dd53da2","Type":"ContainerStarted","Data":"32baf14b79bfafcac99097d13a38f7444e6664b7f3dd08464a90e622f0904dda"} Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.719431 4876 generic.go:334] "Generic (PLEG): container finished" podID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerID="1f83666543781c1ff70a28c0ddb3a10976cf83c6c49c7dc46f323def2410889a" exitCode=0 Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.719486 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerDied","Data":"1f83666543781c1ff70a28c0ddb3a10976cf83c6c49c7dc46f323def2410889a"} Dec 15 09:01:01 crc kubenswrapper[4876]: I1215 09:01:01.983598 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.023491 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29429821-l6qcz" podStartSLOduration=2.02346481 podStartE2EDuration="2.02346481s" podCreationTimestamp="2025-12-15 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:01:01.746906815 +0000 UTC m=+7787.318049726" watchObservedRunningTime="2025-12-15 09:01:02.02346481 +0000 UTC m=+7787.594607721" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.077417 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.077532 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.077624 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4b47\" (UniqueName: \"kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.077669 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.077868 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.078010 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.078155 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml\") pod \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\" (UID: \"c671dbf8-00c5-44c7-9f64-4153af8e8e48\") " Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.078213 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.078854 4876 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.087819 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.088484 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47" (OuterVolumeSpecName: "kube-api-access-k4b47") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "kube-api-access-k4b47". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.093160 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts" (OuterVolumeSpecName: "scripts") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.178273 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.182765 4876 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-scripts\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.182842 4876 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.182869 4876 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c671dbf8-00c5-44c7-9f64-4153af8e8e48-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.182908 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4b47\" (UniqueName: \"kubernetes.io/projected/c671dbf8-00c5-44c7-9f64-4153af8e8e48-kube-api-access-k4b47\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.200204 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.252321 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data" (OuterVolumeSpecName: "config-data") pod "c671dbf8-00c5-44c7-9f64-4153af8e8e48" (UID: "c671dbf8-00c5-44c7-9f64-4153af8e8e48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.285317 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.285366 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c671dbf8-00c5-44c7-9f64-4153af8e8e48-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.732341 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.736430 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c671dbf8-00c5-44c7-9f64-4153af8e8e48","Type":"ContainerDied","Data":"4b8b859b77dc11d8fa4b81d9d30b8837e0f85063c27dffd2a98697c04f232907"} Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.736500 4876 scope.go:117] "RemoveContainer" containerID="a6441b704cb64ee0ddf0e12aabd72a9f0dd99d3c32d711354a71087abf11fb72" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.768997 4876 scope.go:117] "RemoveContainer" containerID="57af1a8ed7437691ebb67946f3183d8caa6235cbc5fcb53599cbf60edd4eaf6e" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.771623 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.782618 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.790967 4876 scope.go:117] "RemoveContainer" containerID="1f83666543781c1ff70a28c0ddb3a10976cf83c6c49c7dc46f323def2410889a" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806231 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:01:02 crc kubenswrapper[4876]: E1215 09:01:02.806710 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-central-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806731 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-central-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: E1215 09:01:02.806749 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="proxy-httpd" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806756 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="proxy-httpd" Dec 15 09:01:02 crc kubenswrapper[4876]: E1215 09:01:02.806766 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="sg-core" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806774 4876 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="sg-core" Dec 15 09:01:02 crc kubenswrapper[4876]: E1215 09:01:02.806794 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-notification-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806800 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-notification-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.806997 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-notification-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.807018 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="sg-core" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.807030 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="proxy-httpd" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.807056 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" containerName="ceilometer-central-agent" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.809085 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.812416 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.812523 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.819211 4876 scope.go:117] "RemoveContainer" containerID="81be2cbd8493d396ade0f9bee2eefdcdcdd6ba7d42912332a33c75f4da674739" Dec 15 09:01:02 crc kubenswrapper[4876]: I1215 09:01:02.846236 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001009 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001072 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-run-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001150 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001177 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dx2r\" (UniqueName: 
\"kubernetes.io/projected/3244439e-cc67-4e8d-abc0-1907df4aa1c7-kube-api-access-4dx2r\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001507 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-config-data\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.001644 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-scripts\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.002259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-log-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103732 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dx2r\" (UniqueName: \"kubernetes.io/projected/3244439e-cc67-4e8d-abc0-1907df4aa1c7-kube-api-access-4dx2r\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103836 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-config-data\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103890 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-scripts\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103917 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-log-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103947 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.103971 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-run-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.104017 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.104733 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-log-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.105634 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3244439e-cc67-4e8d-abc0-1907df4aa1c7-run-httpd\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.108582 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-config-data\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.109735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.110307 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.120879 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3244439e-cc67-4e8d-abc0-1907df4aa1c7-scripts\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.121627 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dx2r\" (UniqueName: \"kubernetes.io/projected/3244439e-cc67-4e8d-abc0-1907df4aa1c7-kube-api-access-4dx2r\") pod \"ceilometer-0\" (UID: \"3244439e-cc67-4e8d-abc0-1907df4aa1c7\") " pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.163152 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.725242 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 15 09:01:03 crc kubenswrapper[4876]: I1215 09:01:03.748496 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3244439e-cc67-4e8d-abc0-1907df4aa1c7","Type":"ContainerStarted","Data":"e148df32d99f4ae4c86ef74f212ae5077bb7a6395935628a4f53dfcf5c400888"} Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.380283 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.726219 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c671dbf8-00c5-44c7-9f64-4153af8e8e48" path="/var/lib/kubelet/pods/c671dbf8-00c5-44c7-9f64-4153af8e8e48/volumes" Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.808862 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3244439e-cc67-4e8d-abc0-1907df4aa1c7","Type":"ContainerStarted","Data":"b37800dc0aaeb4ff7397178b897b1c3168cc0a12c13f248d82ec38620c8fbb5f"} Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.808914 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3244439e-cc67-4e8d-abc0-1907df4aa1c7","Type":"ContainerStarted","Data":"88c47b2ce68c7db1dac75cf0bb1aeb40936d0b91f9c3ac21659ac961826f40e1"} Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.814569 4876 generic.go:334] "Generic (PLEG): container finished" podID="62fa6354-9ea9-4586-814b-6f4e9dd53da2" containerID="dcb835a4671ffbc38efec2a3bbc3a4b1c5be0cfc532f66e0a2a4d80ba154e292" exitCode=0 Dec 15 09:01:04 crc kubenswrapper[4876]: I1215 09:01:04.814635 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429821-l6qcz" event={"ID":"62fa6354-9ea9-4586-814b-6f4e9dd53da2","Type":"ContainerDied","Data":"dcb835a4671ffbc38efec2a3bbc3a4b1c5be0cfc532f66e0a2a4d80ba154e292"} Dec 15 09:01:05 crc kubenswrapper[4876]: I1215 09:01:05.950869 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.357331 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.445569 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.470992 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.546360 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data\") pod \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.546511 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle\") pod \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.546685 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78tnf\" (UniqueName: \"kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf\") pod \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.546766 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys\") pod \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\" (UID: \"62fa6354-9ea9-4586-814b-6f4e9dd53da2\") " Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.562969 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf" (OuterVolumeSpecName: "kube-api-access-78tnf") pod "62fa6354-9ea9-4586-814b-6f4e9dd53da2" (UID: "62fa6354-9ea9-4586-814b-6f4e9dd53da2"). InnerVolumeSpecName "kube-api-access-78tnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.578284 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "62fa6354-9ea9-4586-814b-6f4e9dd53da2" (UID: "62fa6354-9ea9-4586-814b-6f4e9dd53da2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.589320 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62fa6354-9ea9-4586-814b-6f4e9dd53da2" (UID: "62fa6354-9ea9-4586-814b-6f4e9dd53da2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.615793 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data" (OuterVolumeSpecName: "config-data") pod "62fa6354-9ea9-4586-814b-6f4e9dd53da2" (UID: "62fa6354-9ea9-4586-814b-6f4e9dd53da2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.648848 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78tnf\" (UniqueName: \"kubernetes.io/projected/62fa6354-9ea9-4586-814b-6f4e9dd53da2-kube-api-access-78tnf\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.648890 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.648903 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.648914 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62fa6354-9ea9-4586-814b-6f4e9dd53da2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.838894 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3244439e-cc67-4e8d-abc0-1907df4aa1c7","Type":"ContainerStarted","Data":"183026c8fe294a66dab6563d3878f51d5e6d38fdce19842a3a7adcaebf27e97f"} Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.841703 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429821-l6qcz" event={"ID":"62fa6354-9ea9-4586-814b-6f4e9dd53da2","Type":"ContainerDied","Data":"32baf14b79bfafcac99097d13a38f7444e6664b7f3dd08464a90e622f0904dda"} Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.841740 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32baf14b79bfafcac99097d13a38f7444e6664b7f3dd08464a90e622f0904dda" Dec 15 09:01:06 crc kubenswrapper[4876]: I1215 09:01:06.841766 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29429821-l6qcz" Dec 15 09:01:07 crc kubenswrapper[4876]: I1215 09:01:07.706584 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:01:07 crc kubenswrapper[4876]: E1215 09:01:07.707207 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:01:08 crc kubenswrapper[4876]: I1215 09:01:08.868981 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3244439e-cc67-4e8d-abc0-1907df4aa1c7","Type":"ContainerStarted","Data":"bffcb86f0e465887298b6b4dbf476131e83a8bce95884cd6d23d39d59cc8a492"} Dec 15 09:01:08 crc kubenswrapper[4876]: I1215 09:01:08.870794 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 15 09:01:08 crc kubenswrapper[4876]: I1215 09:01:08.895904 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.460998282 podStartE2EDuration="6.895888584s" podCreationTimestamp="2025-12-15 09:01:02 +0000 UTC" firstStartedPulling="2025-12-15 09:01:03.730297567 +0000 UTC m=+7789.301440478" lastFinishedPulling="2025-12-15 09:01:08.165187869 +0000 UTC m=+7793.736330780" observedRunningTime="2025-12-15 09:01:08.89271682 +0000 UTC m=+7794.463859751" watchObservedRunningTime="2025-12-15 09:01:08.895888584 +0000 UTC m=+7794.467031495" Dec 15 09:01:18 crc kubenswrapper[4876]: I1215 09:01:18.705837 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:01:18 crc kubenswrapper[4876]: E1215 09:01:18.706664 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:01:29 crc kubenswrapper[4876]: I1215 09:01:29.217902 4876 scope.go:117] "RemoveContainer" containerID="d27634d6bf9b83053d6b04edc16da9afce20cc4ef46a924ce82cbd961f760ae2" Dec 15 09:01:29 crc kubenswrapper[4876]: I1215 09:01:29.243288 4876 scope.go:117] "RemoveContainer" containerID="c021ab5eb1c92eecc2b6bd97ce31e03455de6057f5dba9063d5b35ee0dbf4136" Dec 15 09:01:29 crc kubenswrapper[4876]: I1215 09:01:29.310496 4876 scope.go:117] "RemoveContainer" containerID="dd4eefd3e6d82bb36b8f90fe3fd3b89f5f0885972cc2138896eef13a8becef11" Dec 15 09:01:29 crc kubenswrapper[4876]: I1215 09:01:29.706128 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:01:29 crc kubenswrapper[4876]: E1215 09:01:29.706660 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:01:33 crc kubenswrapper[4876]: I1215 09:01:33.168886 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 15 09:01:42 crc kubenswrapper[4876]: I1215 09:01:42.705447 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:01:42 crc kubenswrapper[4876]: E1215 09:01:42.706351 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.805784 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:01:51 crc kubenswrapper[4876]: E1215 09:01:51.806865 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62fa6354-9ea9-4586-814b-6f4e9dd53da2" containerName="keystone-cron" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.806882 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="62fa6354-9ea9-4586-814b-6f4e9dd53da2" containerName="keystone-cron" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.807365 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="62fa6354-9ea9-4586-814b-6f4e9dd53da2" containerName="keystone-cron" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.809399 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.816093 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.967930 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ls6p\" (UniqueName: \"kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.967999 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:51 crc kubenswrapper[4876]: I1215 09:01:51.968062 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.070074 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ls6p\" (UniqueName: \"kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.070188 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.070278 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.070868 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.071018 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.089930 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-8ls6p\" (UniqueName: \"kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p\") pod \"redhat-marketplace-5qdz4\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.135849 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:01:52 crc kubenswrapper[4876]: I1215 09:01:52.623523 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.343036 4876 generic.go:334] "Generic (PLEG): container finished" podID="b361c986-ec90-447f-a50a-c167ee299c25" containerID="8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff" exitCode=0 Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.343452 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerDied","Data":"8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff"} Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.343489 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerStarted","Data":"61e5480de518f19c2935b723426761482a3d5d496ac3730b45038adf5fb366ea"} Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.366528 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b5589867c-59275"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.368712 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.372038 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.404526 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b5589867c-59275"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.503534 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.503970 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.504005 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.504041 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.504313 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.504582 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnl74\" (UniqueName: \"kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.510256 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b5589867c-59275"] Dec 15 09:01:53 crc kubenswrapper[4876]: E1215 09:01:53.511082 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-fnl74 openstack-cell1 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-b5589867c-59275" podUID="29a0733c-c283-49e8-8598-b6d5599ee332" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.547953 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74b55f6967-mw6mx"] Dec 15 09:01:53 crc 
kubenswrapper[4876]: I1215 09:01:53.550418 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.552801 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-networker" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.572009 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74b55f6967-mw6mx"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.607451 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.607885 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.607985 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.608284 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.608442 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.608558 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.608711 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.608863 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: 
\"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.609023 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.609222 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnl74\" (UniqueName: \"kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.609069 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.609726 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.609848 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5j6p\" (UniqueName: \"kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.610024 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.610150 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.610256 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.613778 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: 
\"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.615084 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.618133 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b55f6967-mw6mx"] Dec 15 09:01:53 crc kubenswrapper[4876]: E1215 09:01:53.619342 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-t5j6p openstack-cell1 openstack-networker ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" podUID="c604870c-b808-4753-8f7e-7959041d6f87" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.659881 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.662319 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnl74\" (UniqueName: \"kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74\") pod \"dnsmasq-dns-b5589867c-59275\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.664562 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.679293 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712349 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5j6p\" (UniqueName: \"kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712408 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712436 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712471 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 
09:01:53.712527 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712609 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.712650 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.713710 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.714408 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.715618 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.716139 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.716946 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.717015 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.749529 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5j6p\" (UniqueName: 
\"kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p\") pod \"dnsmasq-dns-74b55f6967-mw6mx\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.814867 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.814992 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.815057 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.815088 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.815147 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bftg\" (UniqueName: \"kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.815184 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.815237 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.918165 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.918915 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.919066 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.919080 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.919239 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bftg\" (UniqueName: \"kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.919638 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.919791 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.920095 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.920790 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.921136 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.921086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: 
\"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.921722 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.921815 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:53 crc kubenswrapper[4876]: I1215 09:01:53.956887 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bftg\" (UniqueName: \"kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg\") pod \"dnsmasq-dns-5864764d55-jldww\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.036647 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.354953 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.355165 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.401265 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.408876 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.530478 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.530646 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.531183 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker" (OuterVolumeSpecName: "openstack-networker") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "openstack-networker". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.531319 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.531397 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.531759 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.531822 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.532418 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config" (OuterVolumeSpecName: "config") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.532513 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.532977 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.532548 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.533071 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.533090 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.533535 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.533614 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.534918 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.534980 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5j6p\" (UniqueName: \"kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.535007 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.535443 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config" (OuterVolumeSpecName: "config") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.536028 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.536070 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnl74\" (UniqueName: \"kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74\") pod \"29a0733c-c283-49e8-8598-b6d5599ee332\" (UID: \"29a0733c-c283-49e8-8598-b6d5599ee332\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.536127 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb\") pod \"c604870c-b808-4753-8f7e-7959041d6f87\" (UID: \"c604870c-b808-4753-8f7e-7959041d6f87\") " Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.536385 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.536629 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537050 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-networker\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537077 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537089 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537101 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537129 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537142 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537154 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537164 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537175 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29a0733c-c283-49e8-8598-b6d5599ee332-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537186 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.537197 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c604870c-b808-4753-8f7e-7959041d6f87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.541874 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p" (OuterVolumeSpecName: "kube-api-access-t5j6p") pod "c604870c-b808-4753-8f7e-7959041d6f87" (UID: "c604870c-b808-4753-8f7e-7959041d6f87"). InnerVolumeSpecName "kube-api-access-t5j6p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.542189 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74" (OuterVolumeSpecName: "kube-api-access-fnl74") pod "29a0733c-c283-49e8-8598-b6d5599ee332" (UID: "29a0733c-c283-49e8-8598-b6d5599ee332"). InnerVolumeSpecName "kube-api-access-fnl74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:01:54 crc kubenswrapper[4876]: W1215 09:01:54.542218 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac64bf5a_d11c_4d70_8fda_fa0f874a3763.slice/crio-f31cf8d67bb19078b3a85bca75ea97a82972435d1b1e7b1292e17216f0d6fdf6 WatchSource:0}: Error finding container f31cf8d67bb19078b3a85bca75ea97a82972435d1b1e7b1292e17216f0d6fdf6: Status 404 returned error can't find the container with id f31cf8d67bb19078b3a85bca75ea97a82972435d1b1e7b1292e17216f0d6fdf6 Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.543177 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.638678 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5j6p\" (UniqueName: \"kubernetes.io/projected/c604870c-b808-4753-8f7e-7959041d6f87-kube-api-access-t5j6p\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:54 crc kubenswrapper[4876]: I1215 09:01:54.639023 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnl74\" (UniqueName: \"kubernetes.io/projected/29a0733c-c283-49e8-8598-b6d5599ee332-kube-api-access-fnl74\") on node \"crc\" DevicePath \"\"" Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.365987 4876 generic.go:334] "Generic (PLEG): container finished" podID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerID="1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237" exitCode=0 Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.366057 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5864764d55-jldww" event={"ID":"ac64bf5a-d11c-4d70-8fda-fa0f874a3763","Type":"ContainerDied","Data":"1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237"} Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.366083 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5864764d55-jldww" event={"ID":"ac64bf5a-d11c-4d70-8fda-fa0f874a3763","Type":"ContainerStarted","Data":"f31cf8d67bb19078b3a85bca75ea97a82972435d1b1e7b1292e17216f0d6fdf6"} Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.371850 4876 generic.go:334] "Generic (PLEG): container finished" podID="b361c986-ec90-447f-a50a-c167ee299c25" containerID="7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954" exitCode=0 Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.371979 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerDied","Data":"7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954"} Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.372269 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b5589867c-59275" Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.372348 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74b55f6967-mw6mx" Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.537226 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b5589867c-59275"] Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.550404 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b5589867c-59275"] Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.579154 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b55f6967-mw6mx"] Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.586682 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74b55f6967-mw6mx"] Dec 15 09:01:55 crc kubenswrapper[4876]: I1215 09:01:55.705933 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:01:55 crc kubenswrapper[4876]: E1215 09:01:55.706329 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:01:56 crc kubenswrapper[4876]: I1215 09:01:56.383339 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5864764d55-jldww" event={"ID":"ac64bf5a-d11c-4d70-8fda-fa0f874a3763","Type":"ContainerStarted","Data":"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad"} Dec 15 09:01:56 crc kubenswrapper[4876]: I1215 09:01:56.383693 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:01:56 crc kubenswrapper[4876]: I1215 09:01:56.404717 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5864764d55-jldww" podStartSLOduration=3.404695207 podStartE2EDuration="3.404695207s" podCreationTimestamp="2025-12-15 09:01:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:01:56.399631363 +0000 UTC m=+7841.970774274" watchObservedRunningTime="2025-12-15 09:01:56.404695207 +0000 UTC m=+7841.975838128" Dec 15 09:01:56 crc kubenswrapper[4876]: I1215 09:01:56.716418 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29a0733c-c283-49e8-8598-b6d5599ee332" path="/var/lib/kubelet/pods/29a0733c-c283-49e8-8598-b6d5599ee332/volumes" Dec 15 09:01:56 crc kubenswrapper[4876]: I1215 09:01:56.716888 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c604870c-b808-4753-8f7e-7959041d6f87" path="/var/lib/kubelet/pods/c604870c-b808-4753-8f7e-7959041d6f87/volumes" Dec 15 09:01:58 crc kubenswrapper[4876]: I1215 09:01:58.407973 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerStarted","Data":"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6"} Dec 15 09:01:58 crc kubenswrapper[4876]: I1215 09:01:58.435016 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5qdz4" podStartSLOduration=2.900271025 podStartE2EDuration="7.434993044s" 
podCreationTimestamp="2025-12-15 09:01:51 +0000 UTC" firstStartedPulling="2025-12-15 09:01:53.345781737 +0000 UTC m=+7838.916924648" lastFinishedPulling="2025-12-15 09:01:57.880503756 +0000 UTC m=+7843.451646667" observedRunningTime="2025-12-15 09:01:58.427738583 +0000 UTC m=+7843.998881494" watchObservedRunningTime="2025-12-15 09:01:58.434993044 +0000 UTC m=+7844.006135975" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.045903 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-d2d9x"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.061595 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-47w78"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.070618 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-d9ca-account-create-update-48tvt"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.081881 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-5r9dq"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.090743 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0ae8-account-create-update-m4mh9"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.099298 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-d2d9x"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.107572 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9b87-account-create-update-2kv7w"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.116203 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-47w78"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.124742 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-5r9dq"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.132867 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-d9ca-account-create-update-48tvt"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.142325 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0ae8-account-create-update-m4mh9"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.151427 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9b87-account-create-update-2kv7w"] Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.720224 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="223cf08e-b426-4b16-a6ba-a5f0c40e8b84" path="/var/lib/kubelet/pods/223cf08e-b426-4b16-a6ba-a5f0c40e8b84/volumes" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.721534 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eaef55c-be30-42da-8330-d032fb44126a" path="/var/lib/kubelet/pods/4eaef55c-be30-42da-8330-d032fb44126a/volumes" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.722320 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fcd344c-f201-4c81-8186-3e81f8302a36" path="/var/lib/kubelet/pods/5fcd344c-f201-4c81-8186-3e81f8302a36/volumes" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.723023 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="953df704-40c6-4eca-8f13-b093568d432b" path="/var/lib/kubelet/pods/953df704-40c6-4eca-8f13-b093568d432b/volumes" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.724409 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c5b5542b-4fb6-4997-9a47-b867bec18c51" path="/var/lib/kubelet/pods/c5b5542b-4fb6-4997-9a47-b867bec18c51/volumes" Dec 15 09:02:00 crc kubenswrapper[4876]: I1215 09:02:00.725131 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0e67fab-6009-4e35-9c27-4796b2edad06" path="/var/lib/kubelet/pods/f0e67fab-6009-4e35-9c27-4796b2edad06/volumes" Dec 15 09:02:02 crc kubenswrapper[4876]: I1215 09:02:02.137598 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:02 crc kubenswrapper[4876]: I1215 09:02:02.137941 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:02 crc kubenswrapper[4876]: I1215 09:02:02.188947 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.037289 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.104081 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.104399 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="dnsmasq-dns" containerID="cri-o://9960240d29fea5fcc4852dc11304fa595e307ea10b7fa513308896cb67b05db0" gracePeriod=10 Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.355924 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.358601 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.371842 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.457826 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-692h9\" (UniqueName: \"kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.457911 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.458120 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.458173 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.458217 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.458245 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.458286 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.492365 4876 generic.go:334] "Generic (PLEG): container finished" podID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerID="9960240d29fea5fcc4852dc11304fa595e307ea10b7fa513308896cb67b05db0" exitCode=0 Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.492413 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" 
event={"ID":"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f","Type":"ContainerDied","Data":"9960240d29fea5fcc4852dc11304fa595e307ea10b7fa513308896cb67b05db0"} Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.560519 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.560849 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.560874 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.560916 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.560960 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-692h9\" (UniqueName: \"kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.561010 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.561183 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.561782 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.561819 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " 
pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.561856 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.562650 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.562682 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.562726 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.590275 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-692h9\" (UniqueName: \"kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9\") pod \"dnsmasq-dns-845684b7f-rwxhg\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.719525 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.747556 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.873284 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc\") pod \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.874212 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb\") pod \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.874392 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlvqx\" (UniqueName: \"kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx\") pod \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.874435 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config\") pod \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.874477 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb\") pod \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\" (UID: \"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f\") " Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.880271 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx" (OuterVolumeSpecName: "kube-api-access-mlvqx") pod "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" (UID: "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f"). InnerVolumeSpecName "kube-api-access-mlvqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.934968 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" (UID: "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.935443 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config" (OuterVolumeSpecName: "config") pod "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" (UID: "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.944933 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" (UID: "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.951568 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" (UID: "a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.976775 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlvqx\" (UniqueName: \"kubernetes.io/projected/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-kube-api-access-mlvqx\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.977042 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.977126 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.977195 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:04 crc kubenswrapper[4876]: I1215 09:02:04.977257 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:05 crc kubenswrapper[4876]: W1215 09:02:05.244772 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90b5e8b3_8a6c_4b7b_a47e_cd7b8524748d.slice/crio-a8eb9cc3f092ec622dd9ec2772fd1193b8fc1b6d1b8a3a3027c0059789f54165 WatchSource:0}: Error finding container a8eb9cc3f092ec622dd9ec2772fd1193b8fc1b6d1b8a3a3027c0059789f54165: Status 404 returned error can't find the container with id a8eb9cc3f092ec622dd9ec2772fd1193b8fc1b6d1b8a3a3027c0059789f54165 Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.248345 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.505393 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" event={"ID":"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d","Type":"ContainerStarted","Data":"a8eb9cc3f092ec622dd9ec2772fd1193b8fc1b6d1b8a3a3027c0059789f54165"} Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.507060 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" event={"ID":"a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f","Type":"ContainerDied","Data":"b5a21428a540cf909b0b17d761d4d045cdb410eb06b0ee014740354c40337472"} Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.507179 4876 scope.go:117] "RemoveContainer" containerID="9960240d29fea5fcc4852dc11304fa595e307ea10b7fa513308896cb67b05db0" Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.507319 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.543621 4876 scope.go:117] "RemoveContainer" containerID="c7eda1d36623b011abd56d9129e2fcb7b465bf74716383aba24096d933e456e8" Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.548930 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:02:05 crc kubenswrapper[4876]: I1215 09:02:05.564171 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7dfd569bdc-96wwg"] Dec 15 09:02:06 crc kubenswrapper[4876]: I1215 09:02:06.527363 4876 generic.go:334] "Generic (PLEG): container finished" podID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerID="def221a5696659653fd92368be6c92c7a59d1c67716799c7bb0fe21e99938d49" exitCode=0 Dec 15 09:02:06 crc kubenswrapper[4876]: I1215 09:02:06.527684 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" event={"ID":"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d","Type":"ContainerDied","Data":"def221a5696659653fd92368be6c92c7a59d1c67716799c7bb0fe21e99938d49"} Dec 15 09:02:06 crc kubenswrapper[4876]: I1215 09:02:06.727776 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" path="/var/lib/kubelet/pods/a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f/volumes" Dec 15 09:02:07 crc kubenswrapper[4876]: I1215 09:02:07.543735 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" event={"ID":"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d","Type":"ContainerStarted","Data":"84d743e5e725ffe94e8fc88ca6e8fdacec6cded0d315f5b7746fff90e8c8156f"} Dec 15 09:02:07 crc kubenswrapper[4876]: I1215 09:02:07.544544 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:07 crc kubenswrapper[4876]: I1215 09:02:07.563207 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" podStartSLOduration=3.563183776 podStartE2EDuration="3.563183776s" podCreationTimestamp="2025-12-15 09:02:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:02:07.561761498 +0000 UTC m=+7853.132904419" watchObservedRunningTime="2025-12-15 09:02:07.563183776 +0000 UTC m=+7853.134326707" Dec 15 09:02:07 crc kubenswrapper[4876]: I1215 09:02:07.705936 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:02:07 crc kubenswrapper[4876]: E1215 09:02:07.706626 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:02:09 crc kubenswrapper[4876]: I1215 09:02:09.500349 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7dfd569bdc-96wwg" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.138:5353: i/o timeout" Dec 15 09:02:12 crc kubenswrapper[4876]: I1215 09:02:12.188780 4876 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:12 crc kubenswrapper[4876]: I1215 09:02:12.252839 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:02:12 crc kubenswrapper[4876]: I1215 09:02:12.583707 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5qdz4" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="registry-server" containerID="cri-o://129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6" gracePeriod=2 Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.092773 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.240793 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ls6p\" (UniqueName: \"kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p\") pod \"b361c986-ec90-447f-a50a-c167ee299c25\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.240983 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities\") pod \"b361c986-ec90-447f-a50a-c167ee299c25\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.241006 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content\") pod \"b361c986-ec90-447f-a50a-c167ee299c25\" (UID: \"b361c986-ec90-447f-a50a-c167ee299c25\") " Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.241837 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities" (OuterVolumeSpecName: "utilities") pod "b361c986-ec90-447f-a50a-c167ee299c25" (UID: "b361c986-ec90-447f-a50a-c167ee299c25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.246084 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p" (OuterVolumeSpecName: "kube-api-access-8ls6p") pod "b361c986-ec90-447f-a50a-c167ee299c25" (UID: "b361c986-ec90-447f-a50a-c167ee299c25"). InnerVolumeSpecName "kube-api-access-8ls6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.263635 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b361c986-ec90-447f-a50a-c167ee299c25" (UID: "b361c986-ec90-447f-a50a-c167ee299c25"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.343068 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.343139 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b361c986-ec90-447f-a50a-c167ee299c25-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.343151 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ls6p\" (UniqueName: \"kubernetes.io/projected/b361c986-ec90-447f-a50a-c167ee299c25-kube-api-access-8ls6p\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.595362 4876 generic.go:334] "Generic (PLEG): container finished" podID="b361c986-ec90-447f-a50a-c167ee299c25" containerID="129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6" exitCode=0 Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.595410 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerDied","Data":"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6"} Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.595424 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5qdz4" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.595455 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5qdz4" event={"ID":"b361c986-ec90-447f-a50a-c167ee299c25","Type":"ContainerDied","Data":"61e5480de518f19c2935b723426761482a3d5d496ac3730b45038adf5fb366ea"} Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.595476 4876 scope.go:117] "RemoveContainer" containerID="129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.630622 4876 scope.go:117] "RemoveContainer" containerID="7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.632119 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.640284 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5qdz4"] Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.652175 4876 scope.go:117] "RemoveContainer" containerID="8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.699070 4876 scope.go:117] "RemoveContainer" containerID="129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6" Dec 15 09:02:13 crc kubenswrapper[4876]: E1215 09:02:13.699542 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6\": container with ID starting with 129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6 not found: ID does not exist" containerID="129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.699583 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6"} err="failed to get container status \"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6\": rpc error: code = NotFound desc = could not find container \"129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6\": container with ID starting with 129938ded44ee6ddde6f7d7454745cc8e4cde6904f19a1f79de7a40aa2ed71b6 not found: ID does not exist" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.699609 4876 scope.go:117] "RemoveContainer" containerID="7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954" Dec 15 09:02:13 crc kubenswrapper[4876]: E1215 09:02:13.700048 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954\": container with ID starting with 7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954 not found: ID does not exist" containerID="7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.700074 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954"} err="failed to get container status \"7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954\": rpc error: code = NotFound desc = could not find container \"7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954\": container with ID starting with 7709a11a646d775c1518ad67d8be90e9852cb652d3356de9779c45e45cde6954 not found: ID does not exist" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.700089 4876 scope.go:117] "RemoveContainer" containerID="8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff" Dec 15 09:02:13 crc kubenswrapper[4876]: E1215 09:02:13.701393 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff\": container with ID starting with 8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff not found: ID does not exist" containerID="8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff" Dec 15 09:02:13 crc kubenswrapper[4876]: I1215 09:02:13.701423 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff"} err="failed to get container status \"8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff\": rpc error: code = NotFound desc = could not find container \"8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff\": container with ID starting with 8f9695e73164496b68a9f69ee0cec219840b966e2537d53737f5331de72c52ff not found: ID does not exist" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.718197 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b361c986-ec90-447f-a50a-c167ee299c25" path="/var/lib/kubelet/pods/b361c986-ec90-447f-a50a-c167ee299c25/volumes" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.721845 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.796089 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.796392 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5864764d55-jldww" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="dnsmasq-dns" containerID="cri-o://ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad" gracePeriod=10 Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975057 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5694c8549f-nhlb9"] Dec 15 09:02:14 crc kubenswrapper[4876]: E1215 09:02:14.975473 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="dnsmasq-dns" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975489 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="dnsmasq-dns" Dec 15 09:02:14 crc kubenswrapper[4876]: E1215 09:02:14.975508 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="extract-utilities" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975514 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="extract-utilities" Dec 15 09:02:14 crc kubenswrapper[4876]: E1215 09:02:14.975533 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="init" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975539 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="init" Dec 15 09:02:14 crc kubenswrapper[4876]: E1215 09:02:14.975553 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="registry-server" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975560 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="registry-server" Dec 15 09:02:14 crc kubenswrapper[4876]: E1215 09:02:14.975573 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="extract-content" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975579 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="extract-content" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975771 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3c13c0c-8afe-45dd-8a5c-9e62fd90de7f" containerName="dnsmasq-dns" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.975790 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b361c986-ec90-447f-a50a-c167ee299c25" containerName="registry-server" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.976861 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:14 crc kubenswrapper[4876]: I1215 09:02:14.993026 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5694c8549f-nhlb9"] Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085310 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-networker\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085365 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-sb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085406 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-nb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085508 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-config\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085536 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-dns-svc\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-cell1\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.085668 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx92f\" (UniqueName: \"kubernetes.io/projected/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-kube-api-access-kx92f\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.186976 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-config\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187017 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-dns-svc\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187079 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-cell1\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187164 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx92f\" (UniqueName: \"kubernetes.io/projected/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-kube-api-access-kx92f\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-networker\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187230 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-sb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.187255 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-nb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.188267 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-cell1\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.188310 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-nb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.188985 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-config\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.189273 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-dns-svc\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.189636 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-ovsdbserver-sb\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.189642 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-openstack-networker\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.214358 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx92f\" (UniqueName: \"kubernetes.io/projected/67dfe15a-c41b-46c1-bc0a-0a089a4cfabd-kube-api-access-kx92f\") pod \"dnsmasq-dns-5694c8549f-nhlb9\" (UID: \"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd\") " pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.307968 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.337179 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390095 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bftg\" (UniqueName: \"kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390172 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390263 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390776 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390840 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 
09:02:15.390897 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.390960 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb\") pod \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\" (UID: \"ac64bf5a-d11c-4d70-8fda-fa0f874a3763\") " Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.396039 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg" (OuterVolumeSpecName: "kube-api-access-6bftg") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "kube-api-access-6bftg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.453911 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.464479 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.475329 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker" (OuterVolumeSpecName: "openstack-networker") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "openstack-networker". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.485267 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.488598 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.493800 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config" (OuterVolumeSpecName: "config") pod "ac64bf5a-d11c-4d70-8fda-fa0f874a3763" (UID: "ac64bf5a-d11c-4d70-8fda-fa0f874a3763"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496587 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496619 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-networker\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496652 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496663 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bftg\" (UniqueName: \"kubernetes.io/projected/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-kube-api-access-6bftg\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496674 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496709 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.496721 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac64bf5a-d11c-4d70-8fda-fa0f874a3763-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.619510 4876 generic.go:334] "Generic (PLEG): container finished" podID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerID="ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad" exitCode=0 Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.619554 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5864764d55-jldww" event={"ID":"ac64bf5a-d11c-4d70-8fda-fa0f874a3763","Type":"ContainerDied","Data":"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad"} Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.619579 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5864764d55-jldww" event={"ID":"ac64bf5a-d11c-4d70-8fda-fa0f874a3763","Type":"ContainerDied","Data":"f31cf8d67bb19078b3a85bca75ea97a82972435d1b1e7b1292e17216f0d6fdf6"} Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.619596 4876 scope.go:117] "RemoveContainer" containerID="ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.619716 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5864764d55-jldww" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.669766 4876 scope.go:117] "RemoveContainer" containerID="1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.671813 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.680385 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5864764d55-jldww"] Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.695287 4876 scope.go:117] "RemoveContainer" containerID="ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad" Dec 15 09:02:15 crc kubenswrapper[4876]: E1215 09:02:15.696076 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad\": container with ID starting with ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad not found: ID does not exist" containerID="ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.696122 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad"} err="failed to get container status \"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad\": rpc error: code = NotFound desc = could not find container \"ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad\": container with ID starting with ec762fb3b0fc16a43a5d78bed00e7d470daa1ae8546b1670edbf0ce503b202ad not found: ID does not exist" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.696143 4876 scope.go:117] "RemoveContainer" containerID="1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237" Dec 15 09:02:15 crc kubenswrapper[4876]: E1215 09:02:15.696336 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237\": container with ID starting with 1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237 not found: ID does not exist" containerID="1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.696356 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237"} err="failed to get container status \"1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237\": rpc error: code = NotFound desc = could not find container \"1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237\": container with ID starting with 1cfb7d82171f0a36c7abb43a5a3f77134a56571512f097dd3a2a7e7081e5d237 not found: ID does not exist" Dec 15 09:02:15 crc kubenswrapper[4876]: I1215 09:02:15.828532 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5694c8549f-nhlb9"] Dec 15 09:02:16 crc kubenswrapper[4876]: I1215 09:02:16.631098 4876 generic.go:334] "Generic (PLEG): container finished" podID="67dfe15a-c41b-46c1-bc0a-0a089a4cfabd" containerID="717d910030e5268d15d6a9812faa2762175c0e46a78a434c3b54d927cbc0c9c1" exitCode=0 Dec 15 09:02:16 crc kubenswrapper[4876]: I1215 09:02:16.631172 4876 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" event={"ID":"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd","Type":"ContainerDied","Data":"717d910030e5268d15d6a9812faa2762175c0e46a78a434c3b54d927cbc0c9c1"} Dec 15 09:02:16 crc kubenswrapper[4876]: I1215 09:02:16.631399 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" event={"ID":"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd","Type":"ContainerStarted","Data":"465c50de0a5bca88772e87c5c6ed6c946f5e26cc5acacd2ce94f68021c4d99ba"} Dec 15 09:02:16 crc kubenswrapper[4876]: I1215 09:02:16.718548 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" path="/var/lib/kubelet/pods/ac64bf5a-d11c-4d70-8fda-fa0f874a3763/volumes" Dec 15 09:02:17 crc kubenswrapper[4876]: I1215 09:02:17.644799 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" event={"ID":"67dfe15a-c41b-46c1-bc0a-0a089a4cfabd","Type":"ContainerStarted","Data":"152bceede93312cf987a77bfe74f2bb374a503d037ac1ea876af95f5ff51d6be"} Dec 15 09:02:17 crc kubenswrapper[4876]: I1215 09:02:17.645245 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:17 crc kubenswrapper[4876]: I1215 09:02:17.671221 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" podStartSLOduration=3.6712008860000003 podStartE2EDuration="3.671200886s" podCreationTimestamp="2025-12-15 09:02:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:02:17.667388475 +0000 UTC m=+7863.238531466" watchObservedRunningTime="2025-12-15 09:02:17.671200886 +0000 UTC m=+7863.242343787" Dec 15 09:02:19 crc kubenswrapper[4876]: I1215 09:02:19.045333 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6zsmd"] Dec 15 09:02:19 crc kubenswrapper[4876]: I1215 09:02:19.057807 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6zsmd"] Dec 15 09:02:19 crc kubenswrapper[4876]: I1215 09:02:19.706073 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:02:19 crc kubenswrapper[4876]: E1215 09:02:19.706669 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:02:20 crc kubenswrapper[4876]: I1215 09:02:20.719365 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11b68586-fe23-4bf7-9880-17f32a5fc121" path="/var/lib/kubelet/pods/11b68586-fe23-4bf7-9880-17f32a5fc121/volumes" Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.311177 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5694c8549f-nhlb9" Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.377731 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.378346 4876 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="dnsmasq-dns" containerID="cri-o://84d743e5e725ffe94e8fc88ca6e8fdacec6cded0d315f5b7746fff90e8c8156f" gracePeriod=10 Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.718846 4876 generic.go:334] "Generic (PLEG): container finished" podID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerID="84d743e5e725ffe94e8fc88ca6e8fdacec6cded0d315f5b7746fff90e8c8156f" exitCode=0 Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.719096 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" event={"ID":"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d","Type":"ContainerDied","Data":"84d743e5e725ffe94e8fc88ca6e8fdacec6cded0d315f5b7746fff90e8c8156f"} Dec 15 09:02:25 crc kubenswrapper[4876]: I1215 09:02:25.925319 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.013877 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014133 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014294 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014348 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.014395 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-692h9\" (UniqueName: \"kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9\") pod \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\" (UID: \"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d\") " Dec 15 09:02:26 crc 
kubenswrapper[4876]: I1215 09:02:26.040376 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9" (OuterVolumeSpecName: "kube-api-access-692h9") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "kube-api-access-692h9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.075598 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.078234 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config" (OuterVolumeSpecName: "config") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.078890 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker" (OuterVolumeSpecName: "openstack-networker") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "openstack-networker". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.084818 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.102667 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.105196 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" (UID: "90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d"). InnerVolumeSpecName "openstack-cell1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116381 4876 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-config\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116418 4876 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116428 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-networker\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-networker\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116441 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-692h9\" (UniqueName: \"kubernetes.io/projected/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-kube-api-access-692h9\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116449 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116458 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.116466 4876 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.733117 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" event={"ID":"90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d","Type":"ContainerDied","Data":"a8eb9cc3f092ec622dd9ec2772fd1193b8fc1b6d1b8a3a3027c0059789f54165"} Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.733863 4876 scope.go:117] "RemoveContainer" containerID="84d743e5e725ffe94e8fc88ca6e8fdacec6cded0d315f5b7746fff90e8c8156f" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.733202 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845684b7f-rwxhg" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.767475 4876 scope.go:117] "RemoveContainer" containerID="def221a5696659653fd92368be6c92c7a59d1c67716799c7bb0fe21e99938d49" Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.767499 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:26 crc kubenswrapper[4876]: I1215 09:02:26.777883 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845684b7f-rwxhg"] Dec 15 09:02:28 crc kubenswrapper[4876]: I1215 09:02:28.718668 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" path="/var/lib/kubelet/pods/90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d/volumes" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.450752 4876 scope.go:117] "RemoveContainer" containerID="9df3929b52d68ab57b3705e184a5bb23f4f5f0872a16abcfc91c5ff84756fae8" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.473075 4876 scope.go:117] "RemoveContainer" containerID="16ae18cde3b96f71b8208d640df0ea38786910a78569dda822df8e43d7e27205" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.520161 4876 scope.go:117] "RemoveContainer" containerID="361fc22060ff41baf5b749911caa16711f74ea6f5d15c69ee892bc5abee4a52f" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.579070 4876 scope.go:117] "RemoveContainer" containerID="78947f7dd5b1daef1a8470d69630a6955a70ba18f58ff758ed6f7044b49f8f29" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.610734 4876 scope.go:117] "RemoveContainer" containerID="c2aec999d98831d9eff88709e2d78ab08404585bea81efb1b2be22bc6c1add6e" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.653257 4876 scope.go:117] "RemoveContainer" containerID="8ca56a8617bdd7a83636537f726a90544d4e9d6d4243444649cafc83ea495083" Dec 15 09:02:29 crc kubenswrapper[4876]: I1215 09:02:29.703009 4876 scope.go:117] "RemoveContainer" containerID="040522678f9118e4de447e03daa1ea3039e2a5a62e3182154a1129c034633d32" Dec 15 09:02:30 crc kubenswrapper[4876]: I1215 09:02:30.706310 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:02:30 crc kubenswrapper[4876]: E1215 09:02:30.706934 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:02:38 crc kubenswrapper[4876]: I1215 09:02:38.035502 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z5c25"] Dec 15 09:02:38 crc kubenswrapper[4876]: I1215 09:02:38.050445 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z5c25"] Dec 15 09:02:38 crc kubenswrapper[4876]: I1215 09:02:38.721902 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22f2e283-c325-496d-ac96-3624106f8233" path="/var/lib/kubelet/pods/22f2e283-c325-496d-ac96-3624106f8233/volumes" Dec 15 09:02:39 crc kubenswrapper[4876]: I1215 09:02:39.038801 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-dtrjw"] Dec 15 09:02:39 crc kubenswrapper[4876]: I1215 
09:02:39.048492 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-dtrjw"] Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.720589 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f173f1f0-710b-49df-b7d4-41407cc721ee" path="/var/lib/kubelet/pods/f173f1f0-710b-49df-b7d4-41407cc721ee/volumes" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.769441 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr"] Dec 15 09:02:40 crc kubenswrapper[4876]: E1215 09:02:40.770096 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770138 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: E1215 09:02:40.770158 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770164 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: E1215 09:02:40.770178 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="init" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770186 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="init" Dec 15 09:02:40 crc kubenswrapper[4876]: E1215 09:02:40.770230 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="init" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770237 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="init" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770477 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac64bf5a-d11c-4d70-8fda-fa0f874a3763" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.770498 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="90b5e8b3-8a6c-4b7b-a47e-cd7b8524748d" containerName="dnsmasq-dns" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.775920 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.780484 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.782270 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.782604 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.782755 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.791804 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr"] Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.807496 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r"] Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.809021 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.810165 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r"] Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.812546 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.812878 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.914747 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9bb\" (UniqueName: \"kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.914805 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.914830 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.914847 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.915047 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.915131 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.915341 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcdxg\" (UniqueName: \"kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.915383 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:40 crc kubenswrapper[4876]: I1215 09:02:40.915813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.017208 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.017261 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: 
\"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.017288 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018294 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018594 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018702 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcdxg\" (UniqueName: \"kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018736 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018919 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.018962 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9bb\" (UniqueName: \"kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.024363 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.024920 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.026617 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.026618 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.026413 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.024039 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.027754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.042023 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcdxg\" (UniqueName: \"kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc 
kubenswrapper[4876]: I1215 09:02:41.042461 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9bb\" (UniqueName: \"kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.100984 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.148053 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.629798 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr"] Dec 15 09:02:41 crc kubenswrapper[4876]: W1215 09:02:41.753686 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1ca81a1_767a_4f98_93a1_0f04472a134c.slice/crio-b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7 WatchSource:0}: Error finding container b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7: Status 404 returned error can't find the container with id b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7 Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.757666 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r"] Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.908377 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" event={"ID":"c1ca81a1-767a-4f98-93a1-0f04472a134c","Type":"ContainerStarted","Data":"b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7"} Dec 15 09:02:41 crc kubenswrapper[4876]: I1215 09:02:41.911616 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" event={"ID":"c38d67c8-ae96-4f07-a552-413a21e47d80","Type":"ContainerStarted","Data":"17dc7231c74b326cd59da6a0c4fd5c7e5bb0562cf1fc631a249de7f46735fb5b"} Dec 15 09:02:43 crc kubenswrapper[4876]: I1215 09:02:43.706194 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:02:43 crc kubenswrapper[4876]: E1215 09:02:43.706781 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:02:53 crc kubenswrapper[4876]: I1215 09:02:53.014612 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" event={"ID":"c38d67c8-ae96-4f07-a552-413a21e47d80","Type":"ContainerStarted","Data":"ae51e1d3350bc33b50ce02053aafe80902cc9b593d9412ea60cd46f82ce18332"} Dec 15 
09:02:53 crc kubenswrapper[4876]: I1215 09:02:53.016669 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" event={"ID":"c1ca81a1-767a-4f98-93a1-0f04472a134c","Type":"ContainerStarted","Data":"07d90c692e9261aa5f05e60071493d63bd60c1a761981fb2cb4b96d27dc53bdc"} Dec 15 09:02:53 crc kubenswrapper[4876]: I1215 09:02:53.034139 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" podStartSLOduration=2.4213000080000002 podStartE2EDuration="13.034102529s" podCreationTimestamp="2025-12-15 09:02:40 +0000 UTC" firstStartedPulling="2025-12-15 09:02:41.632495315 +0000 UTC m=+7887.203638226" lastFinishedPulling="2025-12-15 09:02:52.245297836 +0000 UTC m=+7897.816440747" observedRunningTime="2025-12-15 09:02:53.033492874 +0000 UTC m=+7898.604635805" watchObservedRunningTime="2025-12-15 09:02:53.034102529 +0000 UTC m=+7898.605245460" Dec 15 09:02:53 crc kubenswrapper[4876]: I1215 09:02:53.060714 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" podStartSLOduration=2.536638633 podStartE2EDuration="13.060691689s" podCreationTimestamp="2025-12-15 09:02:40 +0000 UTC" firstStartedPulling="2025-12-15 09:02:41.756238031 +0000 UTC m=+7887.327380942" lastFinishedPulling="2025-12-15 09:02:52.280291087 +0000 UTC m=+7897.851433998" observedRunningTime="2025-12-15 09:02:53.054553958 +0000 UTC m=+7898.625696899" watchObservedRunningTime="2025-12-15 09:02:53.060691689 +0000 UTC m=+7898.631834600" Dec 15 09:02:54 crc kubenswrapper[4876]: I1215 09:02:54.718246 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:02:54 crc kubenswrapper[4876]: E1215 09:02:54.718467 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:02:57 crc kubenswrapper[4876]: I1215 09:02:57.052605 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-trpzc"] Dec 15 09:02:57 crc kubenswrapper[4876]: I1215 09:02:57.061786 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-trpzc"] Dec 15 09:02:58 crc kubenswrapper[4876]: I1215 09:02:58.718773 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d8692dd-d36c-4f3a-83c5-c3380ae1677c" path="/var/lib/kubelet/pods/8d8692dd-d36c-4f3a-83c5-c3380ae1677c/volumes" Dec 15 09:03:02 crc kubenswrapper[4876]: I1215 09:03:02.102939 4876 generic.go:334] "Generic (PLEG): container finished" podID="c1ca81a1-767a-4f98-93a1-0f04472a134c" containerID="07d90c692e9261aa5f05e60071493d63bd60c1a761981fb2cb4b96d27dc53bdc" exitCode=0 Dec 15 09:03:02 crc kubenswrapper[4876]: I1215 09:03:02.103041 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" event={"ID":"c1ca81a1-767a-4f98-93a1-0f04472a134c","Type":"ContainerDied","Data":"07d90c692e9261aa5f05e60071493d63bd60c1a761981fb2cb4b96d27dc53bdc"} Dec 15 09:03:03 crc 
kubenswrapper[4876]: I1215 09:03:03.534970 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.637693 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh9bb\" (UniqueName: \"kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb\") pod \"c1ca81a1-767a-4f98-93a1-0f04472a134c\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.637739 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory\") pod \"c1ca81a1-767a-4f98-93a1-0f04472a134c\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.637769 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle\") pod \"c1ca81a1-767a-4f98-93a1-0f04472a134c\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.637914 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key\") pod \"c1ca81a1-767a-4f98-93a1-0f04472a134c\" (UID: \"c1ca81a1-767a-4f98-93a1-0f04472a134c\") " Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.647480 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "c1ca81a1-767a-4f98-93a1-0f04472a134c" (UID: "c1ca81a1-767a-4f98-93a1-0f04472a134c"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.656450 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb" (OuterVolumeSpecName: "kube-api-access-lh9bb") pod "c1ca81a1-767a-4f98-93a1-0f04472a134c" (UID: "c1ca81a1-767a-4f98-93a1-0f04472a134c"). InnerVolumeSpecName "kube-api-access-lh9bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.671838 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c1ca81a1-767a-4f98-93a1-0f04472a134c" (UID: "c1ca81a1-767a-4f98-93a1-0f04472a134c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.672740 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory" (OuterVolumeSpecName: "inventory") pod "c1ca81a1-767a-4f98-93a1-0f04472a134c" (UID: "c1ca81a1-767a-4f98-93a1-0f04472a134c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.744886 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.746378 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh9bb\" (UniqueName: \"kubernetes.io/projected/c1ca81a1-767a-4f98-93a1-0f04472a134c-kube-api-access-lh9bb\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.746416 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:03 crc kubenswrapper[4876]: I1215 09:03:03.746431 4876 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1ca81a1-767a-4f98-93a1-0f04472a134c-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:04 crc kubenswrapper[4876]: I1215 09:03:04.127736 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" event={"ID":"c1ca81a1-767a-4f98-93a1-0f04472a134c","Type":"ContainerDied","Data":"b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7"} Dec 15 09:03:04 crc kubenswrapper[4876]: I1215 09:03:04.127780 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3dbdb15d1a9a3894315b5d110c34a1c368d6498c59784bc937908001f4823b7" Dec 15 09:03:04 crc kubenswrapper[4876]: I1215 09:03:04.127824 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r" Dec 15 09:03:05 crc kubenswrapper[4876]: I1215 09:03:05.139961 4876 generic.go:334] "Generic (PLEG): container finished" podID="c38d67c8-ae96-4f07-a552-413a21e47d80" containerID="ae51e1d3350bc33b50ce02053aafe80902cc9b593d9412ea60cd46f82ce18332" exitCode=0 Dec 15 09:03:05 crc kubenswrapper[4876]: I1215 09:03:05.140081 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" event={"ID":"c38d67c8-ae96-4f07-a552-413a21e47d80","Type":"ContainerDied","Data":"ae51e1d3350bc33b50ce02053aafe80902cc9b593d9412ea60cd46f82ce18332"} Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.590714 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.610005 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph\") pod \"c38d67c8-ae96-4f07-a552-413a21e47d80\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.610267 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle\") pod \"c38d67c8-ae96-4f07-a552-413a21e47d80\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.616255 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "c38d67c8-ae96-4f07-a552-413a21e47d80" (UID: "c38d67c8-ae96-4f07-a552-413a21e47d80"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.617283 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph" (OuterVolumeSpecName: "ceph") pod "c38d67c8-ae96-4f07-a552-413a21e47d80" (UID: "c38d67c8-ae96-4f07-a552-413a21e47d80"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.706826 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:03:06 crc kubenswrapper[4876]: E1215 09:03:06.707484 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.713844 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key\") pod \"c38d67c8-ae96-4f07-a552-413a21e47d80\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.713965 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory\") pod \"c38d67c8-ae96-4f07-a552-413a21e47d80\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.714023 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcdxg\" (UniqueName: \"kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg\") pod \"c38d67c8-ae96-4f07-a552-413a21e47d80\" (UID: \"c38d67c8-ae96-4f07-a552-413a21e47d80\") " Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.714834 4876 
reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.714860 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.719201 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg" (OuterVolumeSpecName: "kube-api-access-wcdxg") pod "c38d67c8-ae96-4f07-a552-413a21e47d80" (UID: "c38d67c8-ae96-4f07-a552-413a21e47d80"). InnerVolumeSpecName "kube-api-access-wcdxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.748956 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c38d67c8-ae96-4f07-a552-413a21e47d80" (UID: "c38d67c8-ae96-4f07-a552-413a21e47d80"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.749891 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory" (OuterVolumeSpecName: "inventory") pod "c38d67c8-ae96-4f07-a552-413a21e47d80" (UID: "c38d67c8-ae96-4f07-a552-413a21e47d80"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.815606 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.815645 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c38d67c8-ae96-4f07-a552-413a21e47d80-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:06 crc kubenswrapper[4876]: I1215 09:03:06.815656 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcdxg\" (UniqueName: \"kubernetes.io/projected/c38d67c8-ae96-4f07-a552-413a21e47d80-kube-api-access-wcdxg\") on node \"crc\" DevicePath \"\"" Dec 15 09:03:07 crc kubenswrapper[4876]: I1215 09:03:07.161828 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" event={"ID":"c38d67c8-ae96-4f07-a552-413a21e47d80","Type":"ContainerDied","Data":"17dc7231c74b326cd59da6a0c4fd5c7e5bb0562cf1fc631a249de7f46735fb5b"} Dec 15 09:03:07 crc kubenswrapper[4876]: I1215 09:03:07.162237 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17dc7231c74b326cd59da6a0c4fd5c7e5bb0562cf1fc631a249de7f46735fb5b" Dec 15 09:03:07 crc kubenswrapper[4876]: I1215 09:03:07.162076 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.777481 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279"] Dec 15 09:03:09 crc kubenswrapper[4876]: E1215 09:03:09.778141 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c38d67c8-ae96-4f07-a552-413a21e47d80" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.778156 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c38d67c8-ae96-4f07-a552-413a21e47d80" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 15 09:03:09 crc kubenswrapper[4876]: E1215 09:03:09.778180 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ca81a1-767a-4f98-93a1-0f04472a134c" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.778188 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ca81a1-767a-4f98-93a1-0f04472a134c" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.778445 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ca81a1-767a-4f98-93a1-0f04472a134c" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-networ" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.778460 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c38d67c8-ae96-4f07-a552-413a21e47d80" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.779225 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.781373 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.782250 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.782258 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.790181 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.790190 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt"] Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.792879 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.794465 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.801811 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.803919 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279"] Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.815705 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt"] Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.975830 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.975886 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.975923 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976021 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976064 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976125 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbq59\" (UniqueName: \"kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: 
\"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976167 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976215 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt8j9\" (UniqueName: \"kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:09 crc kubenswrapper[4876]: I1215 09:03:09.976253 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbq59\" (UniqueName: \"kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078265 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078317 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt8j9\" (UniqueName: \"kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078355 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078431 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle\") 
pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078468 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078520 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078574 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.078611 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.084938 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.085046 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.085171 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.092527 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: 
\"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.092534 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.092898 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.093002 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.095365 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbq59\" (UniqueName: \"kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.096417 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt8j9\" (UniqueName: \"kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.099225 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.122195 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.717981 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279"] Dec 15 09:03:10 crc kubenswrapper[4876]: I1215 09:03:10.797594 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt"] Dec 15 09:03:10 crc kubenswrapper[4876]: W1215 09:03:10.801378 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfb2bbb4_3808_4c06_83d4_12f17922a1e8.slice/crio-5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b WatchSource:0}: Error finding container 5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b: Status 404 returned error can't find the container with id 5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b Dec 15 09:03:11 crc kubenswrapper[4876]: I1215 09:03:11.257385 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" event={"ID":"dfb2bbb4-3808-4c06-83d4-12f17922a1e8","Type":"ContainerStarted","Data":"5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b"} Dec 15 09:03:11 crc kubenswrapper[4876]: I1215 09:03:11.258510 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" event={"ID":"68234414-ac2d-447e-a5c5-4608dc77b5ed","Type":"ContainerStarted","Data":"41a1c89fc5d334372ba95498ecb25254953b50cc636f81bc37b08ccfee6abacb"} Dec 15 09:03:12 crc kubenswrapper[4876]: I1215 09:03:12.274784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" event={"ID":"68234414-ac2d-447e-a5c5-4608dc77b5ed","Type":"ContainerStarted","Data":"da3ababd59c63baf0c9c15396d3eef2ae769e9e1a885f3a875a94481c9636f39"} Dec 15 09:03:12 crc kubenswrapper[4876]: I1215 09:03:12.277180 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" event={"ID":"dfb2bbb4-3808-4c06-83d4-12f17922a1e8","Type":"ContainerStarted","Data":"64c1fab03bab92f9ca50c3c7d875a901aad8ce6db8784fe181bfd4dbee900b26"} Dec 15 09:03:12 crc kubenswrapper[4876]: I1215 09:03:12.293593 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" podStartSLOduration=2.860540923 podStartE2EDuration="3.293577066s" podCreationTimestamp="2025-12-15 09:03:09 +0000 UTC" firstStartedPulling="2025-12-15 09:03:10.707414094 +0000 UTC m=+7916.278557015" lastFinishedPulling="2025-12-15 09:03:11.140450247 +0000 UTC m=+7916.711593158" observedRunningTime="2025-12-15 09:03:12.291039989 +0000 UTC m=+7917.862182920" watchObservedRunningTime="2025-12-15 09:03:12.293577066 +0000 UTC m=+7917.864719977" Dec 15 09:03:12 crc kubenswrapper[4876]: I1215 09:03:12.308583 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" podStartSLOduration=2.501601988 podStartE2EDuration="3.30856645s" podCreationTimestamp="2025-12-15 09:03:09 +0000 UTC" firstStartedPulling="2025-12-15 09:03:10.806269424 +0000 UTC m=+7916.377412335" lastFinishedPulling="2025-12-15 09:03:11.613233886 +0000 UTC m=+7917.184376797" 
observedRunningTime="2025-12-15 09:03:12.306461245 +0000 UTC m=+7917.877604176" watchObservedRunningTime="2025-12-15 09:03:12.30856645 +0000 UTC m=+7917.879709361" Dec 15 09:03:17 crc kubenswrapper[4876]: I1215 09:03:17.705890 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:03:17 crc kubenswrapper[4876]: E1215 09:03:17.706709 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:03:28 crc kubenswrapper[4876]: I1215 09:03:28.706678 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:03:28 crc kubenswrapper[4876]: E1215 09:03:28.707594 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:03:29 crc kubenswrapper[4876]: I1215 09:03:29.940027 4876 scope.go:117] "RemoveContainer" containerID="92a807f7e05a3a548421fd9d1f222d78f5b70f8c29b41cdd696f20c11db4adf0" Dec 15 09:03:29 crc kubenswrapper[4876]: I1215 09:03:29.994537 4876 scope.go:117] "RemoveContainer" containerID="b1dfea02d2f9687f45c446126f30bc122aa5f490f59e9981b9129ad30eb8cfeb" Dec 15 09:03:30 crc kubenswrapper[4876]: I1215 09:03:30.038662 4876 scope.go:117] "RemoveContainer" containerID="1b1bcca7e20d911935e8f6ceed87a9152a4c4a00d644bb4fb06a6845089693ae" Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.042028 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-lpgkh"] Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.051482 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-6125-account-create-update-zskjb"] Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.061359 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-6125-account-create-update-zskjb"] Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.069252 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-lpgkh"] Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.719580 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44e653a9-8f82-45ab-93ac-d7484c3a7b28" path="/var/lib/kubelet/pods/44e653a9-8f82-45ab-93ac-d7484c3a7b28/volumes" Dec 15 09:03:38 crc kubenswrapper[4876]: I1215 09:03:38.720803 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6c55609-894a-484f-88ce-6653d44bb623" path="/var/lib/kubelet/pods/b6c55609-894a-484f-88ce-6653d44bb623/volumes" Dec 15 09:03:39 crc kubenswrapper[4876]: I1215 09:03:39.706874 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:03:39 crc kubenswrapper[4876]: E1215 09:03:39.709048 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:03:51 crc kubenswrapper[4876]: I1215 09:03:51.706464 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:03:51 crc kubenswrapper[4876]: E1215 09:03:51.707165 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:04:05 crc kubenswrapper[4876]: I1215 09:04:05.049249 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7v4vr"] Dec 15 09:04:05 crc kubenswrapper[4876]: I1215 09:04:05.061292 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7v4vr"] Dec 15 09:04:05 crc kubenswrapper[4876]: I1215 09:04:05.706119 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:04:05 crc kubenswrapper[4876]: E1215 09:04:05.706678 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:04:06 crc kubenswrapper[4876]: I1215 09:04:06.722315 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9039d1f0-cf45-4e80-b0d9-230d64432bb0" path="/var/lib/kubelet/pods/9039d1f0-cf45-4e80-b0d9-230d64432bb0/volumes" Dec 15 09:04:20 crc kubenswrapper[4876]: I1215 09:04:20.706263 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:04:20 crc kubenswrapper[4876]: E1215 09:04:20.707229 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.161108 4876 scope.go:117] "RemoveContainer" containerID="2fb89f1d9de892b7f59279efd40ad409b53c34610f3a35fd5da9c4e6ea1d1b70" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.185759 4876 scope.go:117] "RemoveContainer" containerID="99fcdd2e28169f39a6fc1ed1b6f4abf1cd0cac0004e7ee1f4bc07fcaace7f127" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.246382 4876 scope.go:117] "RemoveContainer" containerID="730356d5aab15796a485712045094bd92d18a3cb2e8185cafe816d1e10244d6f" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.609818 4876 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.612433 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.624032 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.646663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.647073 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfhnb\" (UniqueName: \"kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.647237 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.749172 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfhnb\" (UniqueName: \"kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.749437 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.749616 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.750094 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.750133 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content\") pod \"certified-operators-lptms\" (UID: 
\"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.767909 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfhnb\" (UniqueName: \"kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb\") pod \"certified-operators-lptms\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:30 crc kubenswrapper[4876]: I1215 09:04:30.939654 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:31 crc kubenswrapper[4876]: I1215 09:04:31.456306 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:32 crc kubenswrapper[4876]: I1215 09:04:32.233468 4876 generic.go:334] "Generic (PLEG): container finished" podID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerID="577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c" exitCode=0 Dec 15 09:04:32 crc kubenswrapper[4876]: I1215 09:04:32.233552 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerDied","Data":"577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c"} Dec 15 09:04:32 crc kubenswrapper[4876]: I1215 09:04:32.233797 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerStarted","Data":"e7d134b29bac2d74280ea02f799ec8080ab0fea3f210bbbfe197487a7c676a0c"} Dec 15 09:04:32 crc kubenswrapper[4876]: I1215 09:04:32.237681 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:04:34 crc kubenswrapper[4876]: I1215 09:04:34.254484 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerStarted","Data":"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808"} Dec 15 09:04:34 crc kubenswrapper[4876]: I1215 09:04:34.711406 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:04:34 crc kubenswrapper[4876]: E1215 09:04:34.712520 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:04:35 crc kubenswrapper[4876]: I1215 09:04:35.265148 4876 generic.go:334] "Generic (PLEG): container finished" podID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerID="d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808" exitCode=0 Dec 15 09:04:35 crc kubenswrapper[4876]: I1215 09:04:35.265190 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerDied","Data":"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808"} Dec 15 09:04:37 crc kubenswrapper[4876]: I1215 
09:04:37.287800 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerStarted","Data":"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2"} Dec 15 09:04:37 crc kubenswrapper[4876]: I1215 09:04:37.305651 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lptms" podStartSLOduration=3.184408025 podStartE2EDuration="7.305628754s" podCreationTimestamp="2025-12-15 09:04:30 +0000 UTC" firstStartedPulling="2025-12-15 09:04:32.237393399 +0000 UTC m=+7997.808536320" lastFinishedPulling="2025-12-15 09:04:36.358614128 +0000 UTC m=+8001.929757049" observedRunningTime="2025-12-15 09:04:37.303878218 +0000 UTC m=+8002.875021129" watchObservedRunningTime="2025-12-15 09:04:37.305628754 +0000 UTC m=+8002.876771665" Dec 15 09:04:40 crc kubenswrapper[4876]: I1215 09:04:40.940564 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:40 crc kubenswrapper[4876]: I1215 09:04:40.940893 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:40 crc kubenswrapper[4876]: I1215 09:04:40.984176 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:41 crc kubenswrapper[4876]: I1215 09:04:41.384048 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:41 crc kubenswrapper[4876]: I1215 09:04:41.435166 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.347479 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lptms" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="registry-server" containerID="cri-o://e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2" gracePeriod=2 Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.820886 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.865311 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content\") pod \"681a4595-c252-4625-b5b3-3a7cd1458ddf\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.865548 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities\") pod \"681a4595-c252-4625-b5b3-3a7cd1458ddf\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.865585 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfhnb\" (UniqueName: \"kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb\") pod \"681a4595-c252-4625-b5b3-3a7cd1458ddf\" (UID: \"681a4595-c252-4625-b5b3-3a7cd1458ddf\") " Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.867960 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities" (OuterVolumeSpecName: "utilities") pod "681a4595-c252-4625-b5b3-3a7cd1458ddf" (UID: "681a4595-c252-4625-b5b3-3a7cd1458ddf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.879299 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb" (OuterVolumeSpecName: "kube-api-access-pfhnb") pod "681a4595-c252-4625-b5b3-3a7cd1458ddf" (UID: "681a4595-c252-4625-b5b3-3a7cd1458ddf"). InnerVolumeSpecName "kube-api-access-pfhnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.915858 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "681a4595-c252-4625-b5b3-3a7cd1458ddf" (UID: "681a4595-c252-4625-b5b3-3a7cd1458ddf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.967967 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.968013 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfhnb\" (UniqueName: \"kubernetes.io/projected/681a4595-c252-4625-b5b3-3a7cd1458ddf-kube-api-access-pfhnb\") on node \"crc\" DevicePath \"\"" Dec 15 09:04:43 crc kubenswrapper[4876]: I1215 09:04:43.968025 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/681a4595-c252-4625-b5b3-3a7cd1458ddf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.357773 4876 generic.go:334] "Generic (PLEG): container finished" podID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerID="e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2" exitCode=0 Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.357819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerDied","Data":"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2"} Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.357845 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lptms" event={"ID":"681a4595-c252-4625-b5b3-3a7cd1458ddf","Type":"ContainerDied","Data":"e7d134b29bac2d74280ea02f799ec8080ab0fea3f210bbbfe197487a7c676a0c"} Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.357862 4876 scope.go:117] "RemoveContainer" containerID="e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.357986 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lptms" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.421276 4876 scope.go:117] "RemoveContainer" containerID="d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.427679 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.436657 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lptms"] Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.455087 4876 scope.go:117] "RemoveContainer" containerID="577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.492865 4876 scope.go:117] "RemoveContainer" containerID="e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2" Dec 15 09:04:44 crc kubenswrapper[4876]: E1215 09:04:44.493293 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2\": container with ID starting with e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2 not found: ID does not exist" containerID="e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.493331 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2"} err="failed to get container status \"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2\": rpc error: code = NotFound desc = could not find container \"e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2\": container with ID starting with e783f3a5abab9c6258f7ca0b34770f2b9c567d7baeb50b9aba32c5c12ec11ac2 not found: ID does not exist" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.493354 4876 scope.go:117] "RemoveContainer" containerID="d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808" Dec 15 09:04:44 crc kubenswrapper[4876]: E1215 09:04:44.493754 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808\": container with ID starting with d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808 not found: ID does not exist" containerID="d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.493797 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808"} err="failed to get container status \"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808\": rpc error: code = NotFound desc = could not find container \"d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808\": container with ID starting with d1f33c9148619623420cd36533983ab4c2fa334dcc61dd80afb33b739a0ef808 not found: ID does not exist" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.493824 4876 scope.go:117] "RemoveContainer" containerID="577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c" Dec 15 09:04:44 crc kubenswrapper[4876]: E1215 09:04:44.494134 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c\": container with ID starting with 577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c not found: ID does not exist" containerID="577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.494156 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c"} err="failed to get container status \"577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c\": rpc error: code = NotFound desc = could not find container \"577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c\": container with ID starting with 577692d166a7738074e23f21347cac33bb1290bda877900f7c25e4874f14623c not found: ID does not exist" Dec 15 09:04:44 crc kubenswrapper[4876]: I1215 09:04:44.720076 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" path="/var/lib/kubelet/pods/681a4595-c252-4625-b5b3-3a7cd1458ddf/volumes" Dec 15 09:04:46 crc kubenswrapper[4876]: I1215 09:04:46.706546 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:04:46 crc kubenswrapper[4876]: E1215 09:04:46.707303 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:04:57 crc kubenswrapper[4876]: I1215 09:04:57.705249 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:04:57 crc kubenswrapper[4876]: E1215 09:04:57.706069 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:05:10 crc kubenswrapper[4876]: I1215 09:05:10.705820 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:05:10 crc kubenswrapper[4876]: E1215 09:05:10.706578 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:05:22 crc kubenswrapper[4876]: I1215 09:05:22.707150 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:05:22 crc kubenswrapper[4876]: E1215 09:05:22.707977 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:05:35 crc kubenswrapper[4876]: I1215 09:05:35.705787 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:05:36 crc kubenswrapper[4876]: I1215 09:05:36.886185 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e"} Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.763924 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6cv4b"] Dec 15 09:06:18 crc kubenswrapper[4876]: E1215 09:06:18.764952 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="extract-content" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.765002 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="extract-content" Dec 15 09:06:18 crc kubenswrapper[4876]: E1215 09:06:18.765026 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="registry-server" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.765032 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="registry-server" Dec 15 09:06:18 crc kubenswrapper[4876]: E1215 09:06:18.765051 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="extract-utilities" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.765058 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="extract-utilities" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.765335 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="681a4595-c252-4625-b5b3-3a7cd1458ddf" containerName="registry-server" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.767088 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.786344 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6cv4b"] Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.927962 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jldq6\" (UniqueName: \"kubernetes.io/projected/97b21136-eb6d-45b3-bd82-f2854cc3040e-kube-api-access-jldq6\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.928021 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-utilities\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:18 crc kubenswrapper[4876]: I1215 09:06:18.928213 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-catalog-content\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.030410 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jldq6\" (UniqueName: \"kubernetes.io/projected/97b21136-eb6d-45b3-bd82-f2854cc3040e-kube-api-access-jldq6\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.030461 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-utilities\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.030557 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-catalog-content\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.031144 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-catalog-content\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.031874 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97b21136-eb6d-45b3-bd82-f2854cc3040e-utilities\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.051179 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jldq6\" (UniqueName: \"kubernetes.io/projected/97b21136-eb6d-45b3-bd82-f2854cc3040e-kube-api-access-jldq6\") pod \"redhat-operators-6cv4b\" (UID: \"97b21136-eb6d-45b3-bd82-f2854cc3040e\") " pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.088825 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:19 crc kubenswrapper[4876]: I1215 09:06:19.576538 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6cv4b"] Dec 15 09:06:20 crc kubenswrapper[4876]: I1215 09:06:20.308165 4876 generic.go:334] "Generic (PLEG): container finished" podID="97b21136-eb6d-45b3-bd82-f2854cc3040e" containerID="e371f6efb44d0b695636953563481d99372ec9cb45324f6ae014bfe7438c9994" exitCode=0 Dec 15 09:06:20 crc kubenswrapper[4876]: I1215 09:06:20.308212 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6cv4b" event={"ID":"97b21136-eb6d-45b3-bd82-f2854cc3040e","Type":"ContainerDied","Data":"e371f6efb44d0b695636953563481d99372ec9cb45324f6ae014bfe7438c9994"} Dec 15 09:06:20 crc kubenswrapper[4876]: I1215 09:06:20.308507 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6cv4b" event={"ID":"97b21136-eb6d-45b3-bd82-f2854cc3040e","Type":"ContainerStarted","Data":"500b1100f94603a676865384d98d6bdad5c1522199207920fa52caabd801091c"} Dec 15 09:06:32 crc kubenswrapper[4876]: I1215 09:06:32.442462 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6cv4b" event={"ID":"97b21136-eb6d-45b3-bd82-f2854cc3040e","Type":"ContainerStarted","Data":"05c6455881aabbc10866ac17356cbd9397ed45f8908c98b6704c71023a549486"} Dec 15 09:06:34 crc kubenswrapper[4876]: I1215 09:06:34.464050 4876 generic.go:334] "Generic (PLEG): container finished" podID="97b21136-eb6d-45b3-bd82-f2854cc3040e" containerID="05c6455881aabbc10866ac17356cbd9397ed45f8908c98b6704c71023a549486" exitCode=0 Dec 15 09:06:34 crc kubenswrapper[4876]: I1215 09:06:34.464184 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6cv4b" event={"ID":"97b21136-eb6d-45b3-bd82-f2854cc3040e","Type":"ContainerDied","Data":"05c6455881aabbc10866ac17356cbd9397ed45f8908c98b6704c71023a549486"} Dec 15 09:06:35 crc kubenswrapper[4876]: I1215 09:06:35.474831 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6cv4b" event={"ID":"97b21136-eb6d-45b3-bd82-f2854cc3040e","Type":"ContainerStarted","Data":"394ca7525173f8e8901eb348ca1db9b0c26f09eb5bf5d9bc9c93a692f3294ffe"} Dec 15 09:06:35 crc kubenswrapper[4876]: I1215 09:06:35.504602 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6cv4b" podStartSLOduration=2.7333672780000002 podStartE2EDuration="17.504556847s" podCreationTimestamp="2025-12-15 09:06:18 +0000 UTC" firstStartedPulling="2025-12-15 09:06:20.312297349 +0000 UTC m=+8105.883440260" lastFinishedPulling="2025-12-15 09:06:35.083486918 +0000 UTC m=+8120.654629829" observedRunningTime="2025-12-15 09:06:35.496853923 +0000 UTC m=+8121.067996844" watchObservedRunningTime="2025-12-15 09:06:35.504556847 +0000 UTC m=+8121.075699788" Dec 15 09:06:39 crc kubenswrapper[4876]: I1215 09:06:39.089540 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6cv4b" 
Dec 15 09:06:39 crc kubenswrapper[4876]: I1215 09:06:39.090140 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:40 crc kubenswrapper[4876]: I1215 09:06:40.141652 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6cv4b" podUID="97b21136-eb6d-45b3-bd82-f2854cc3040e" containerName="registry-server" probeResult="failure" output=< Dec 15 09:06:40 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:06:40 crc kubenswrapper[4876]: > Dec 15 09:06:49 crc kubenswrapper[4876]: I1215 09:06:49.134730 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:49 crc kubenswrapper[4876]: I1215 09:06:49.184200 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6cv4b" Dec 15 09:06:49 crc kubenswrapper[4876]: I1215 09:06:49.798814 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6cv4b"] Dec 15 09:06:49 crc kubenswrapper[4876]: I1215 09:06:49.971594 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 09:06:49 crc kubenswrapper[4876]: I1215 09:06:49.971888 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ppx4h" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="registry-server" containerID="cri-o://5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522" gracePeriod=2 Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.574310 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.616973 4876 generic.go:334] "Generic (PLEG): container finished" podID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerID="5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522" exitCode=0 Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.617046 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ppx4h" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.617065 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerDied","Data":"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522"} Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.617132 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ppx4h" event={"ID":"5f508be0-2fa4-4b41-a7bd-c0366bac78a5","Type":"ContainerDied","Data":"94dd1401e3e133cbf4d5147b0a068181181229ab05fd3087a7b077112a633fdf"} Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.617158 4876 scope.go:117] "RemoveContainer" containerID="5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.652825 4876 scope.go:117] "RemoveContainer" containerID="390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.675251 4876 scope.go:117] "RemoveContainer" containerID="3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.688704 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities\") pod \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.688771 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws7j6\" (UniqueName: \"kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6\") pod \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.689154 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content\") pod \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\" (UID: \"5f508be0-2fa4-4b41-a7bd-c0366bac78a5\") " Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.689662 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities" (OuterVolumeSpecName: "utilities") pod "5f508be0-2fa4-4b41-a7bd-c0366bac78a5" (UID: "5f508be0-2fa4-4b41-a7bd-c0366bac78a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.694611 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6" (OuterVolumeSpecName: "kube-api-access-ws7j6") pod "5f508be0-2fa4-4b41-a7bd-c0366bac78a5" (UID: "5f508be0-2fa4-4b41-a7bd-c0366bac78a5"). InnerVolumeSpecName "kube-api-access-ws7j6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.791463 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.791495 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws7j6\" (UniqueName: \"kubernetes.io/projected/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-kube-api-access-ws7j6\") on node \"crc\" DevicePath \"\"" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.794064 4876 scope.go:117] "RemoveContainer" containerID="5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522" Dec 15 09:06:50 crc kubenswrapper[4876]: E1215 09:06:50.794659 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522\": container with ID starting with 5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522 not found: ID does not exist" containerID="5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.794696 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522"} err="failed to get container status \"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522\": rpc error: code = NotFound desc = could not find container \"5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522\": container with ID starting with 5872db98e4ef32c20f69c0fa0e5c962cd1a8fbcd04114df26752aba83504f522 not found: ID does not exist" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.794719 4876 scope.go:117] "RemoveContainer" containerID="390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8" Dec 15 09:06:50 crc kubenswrapper[4876]: E1215 09:06:50.795053 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8\": container with ID starting with 390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8 not found: ID does not exist" containerID="390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.795073 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8"} err="failed to get container status \"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8\": rpc error: code = NotFound desc = could not find container \"390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8\": container with ID starting with 390d64a4714e02426e1c8ffa5ae76e735729c924164d3dc233c6d06af400e7d8 not found: ID does not exist" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.795087 4876 scope.go:117] "RemoveContainer" containerID="3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38" Dec 15 09:06:50 crc kubenswrapper[4876]: E1215 09:06:50.795571 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38\": container with ID starting with 
3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38 not found: ID does not exist" containerID="3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.795623 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38"} err="failed to get container status \"3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38\": rpc error: code = NotFound desc = could not find container \"3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38\": container with ID starting with 3d1bd9252720262ea6f695c42b78e849cfee87e3db71f1e1ce6e6930f7f12b38 not found: ID does not exist" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.817280 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f508be0-2fa4-4b41-a7bd-c0366bac78a5" (UID: "5f508be0-2fa4-4b41-a7bd-c0366bac78a5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.893825 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f508be0-2fa4-4b41-a7bd-c0366bac78a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.953295 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 09:06:50 crc kubenswrapper[4876]: I1215 09:06:50.962496 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ppx4h"] Dec 15 09:06:52 crc kubenswrapper[4876]: I1215 09:06:52.719761 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" path="/var/lib/kubelet/pods/5f508be0-2fa4-4b41-a7bd-c0366bac78a5/volumes" Dec 15 09:07:30 crc kubenswrapper[4876]: I1215 09:07:30.984646 4876 generic.go:334] "Generic (PLEG): container finished" podID="dfb2bbb4-3808-4c06-83d4-12f17922a1e8" containerID="64c1fab03bab92f9ca50c3c7d875a901aad8ce6db8784fe181bfd4dbee900b26" exitCode=0 Dec 15 09:07:30 crc kubenswrapper[4876]: I1215 09:07:30.984708 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" event={"ID":"dfb2bbb4-3808-4c06-83d4-12f17922a1e8","Type":"ContainerDied","Data":"64c1fab03bab92f9ca50c3c7d875a901aad8ce6db8784fe181bfd4dbee900b26"} Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.502265 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.671874 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key\") pod \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.671965 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory\") pod \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.672023 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt8j9\" (UniqueName: \"kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9\") pod \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.672198 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle\") pod \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\" (UID: \"dfb2bbb4-3808-4c06-83d4-12f17922a1e8\") " Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.677807 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "dfb2bbb4-3808-4c06-83d4-12f17922a1e8" (UID: "dfb2bbb4-3808-4c06-83d4-12f17922a1e8"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.679573 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9" (OuterVolumeSpecName: "kube-api-access-zt8j9") pod "dfb2bbb4-3808-4c06-83d4-12f17922a1e8" (UID: "dfb2bbb4-3808-4c06-83d4-12f17922a1e8"). InnerVolumeSpecName "kube-api-access-zt8j9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.701570 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dfb2bbb4-3808-4c06-83d4-12f17922a1e8" (UID: "dfb2bbb4-3808-4c06-83d4-12f17922a1e8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.712647 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory" (OuterVolumeSpecName: "inventory") pod "dfb2bbb4-3808-4c06-83d4-12f17922a1e8" (UID: "dfb2bbb4-3808-4c06-83d4-12f17922a1e8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.774714 4876 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.774763 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.774784 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:07:32 crc kubenswrapper[4876]: I1215 09:07:32.774798 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt8j9\" (UniqueName: \"kubernetes.io/projected/dfb2bbb4-3808-4c06-83d4-12f17922a1e8-kube-api-access-zt8j9\") on node \"crc\" DevicePath \"\"" Dec 15 09:07:33 crc kubenswrapper[4876]: I1215 09:07:33.004094 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" event={"ID":"dfb2bbb4-3808-4c06-83d4-12f17922a1e8","Type":"ContainerDied","Data":"5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b"} Dec 15 09:07:33 crc kubenswrapper[4876]: I1215 09:07:33.004155 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b141ddf671b04cea7def71b9e42c993df2a39cf64a56f9273d7e2d5b223e01b" Dec 15 09:07:33 crc kubenswrapper[4876]: I1215 09:07:33.004180 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt" Dec 15 09:07:36 crc kubenswrapper[4876]: I1215 09:07:36.042329 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-xnvd2"] Dec 15 09:07:36 crc kubenswrapper[4876]: I1215 09:07:36.068357 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-xnvd2"] Dec 15 09:07:36 crc kubenswrapper[4876]: I1215 09:07:36.717320 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="478cc8c2-0e45-4db3-aba9-4a4c7de64b60" path="/var/lib/kubelet/pods/478cc8c2-0e45-4db3-aba9-4a4c7de64b60/volumes" Dec 15 09:07:37 crc kubenswrapper[4876]: I1215 09:07:37.024532 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-7e75-account-create-update-zr4ks"] Dec 15 09:07:37 crc kubenswrapper[4876]: I1215 09:07:37.033353 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-7e75-account-create-update-zr4ks"] Dec 15 09:07:38 crc kubenswrapper[4876]: I1215 09:07:38.718940 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4497a99a-b1f8-4e33-96fa-0e5b8462f1ec" path="/var/lib/kubelet/pods/4497a99a-b1f8-4e33-96fa-0e5b8462f1ec/volumes" Dec 15 09:07:51 crc kubenswrapper[4876]: I1215 09:07:51.061483 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-mv94m"] Dec 15 09:07:51 crc kubenswrapper[4876]: I1215 09:07:51.072334 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-mv94m"] Dec 15 09:07:52 crc kubenswrapper[4876]: I1215 09:07:52.716555 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b5723b0-8528-4595-a010-8a861e73e8a6" path="/var/lib/kubelet/pods/9b5723b0-8528-4595-a010-8a861e73e8a6/volumes" Dec 15 09:07:57 crc kubenswrapper[4876]: I1215 09:07:57.322767 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:07:57 crc kubenswrapper[4876]: I1215 09:07:57.323378 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:08:27 crc kubenswrapper[4876]: I1215 09:08:27.323034 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:08:27 crc kubenswrapper[4876]: I1215 09:08:27.323627 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:08:30 crc kubenswrapper[4876]: I1215 09:08:30.490730 4876 scope.go:117] "RemoveContainer" containerID="6369db67ff0505b20006eb6003a3ea728bbd5ce514b5dcc9a6eed99da114916f" Dec 15 09:08:30 crc kubenswrapper[4876]: I1215 09:08:30.528825 
4876 scope.go:117] "RemoveContainer" containerID="718f7e3398908350747e65abc793f951233f7950528bcfd948460bd4b39d1d66" Dec 15 09:08:30 crc kubenswrapper[4876]: I1215 09:08:30.586567 4876 scope.go:117] "RemoveContainer" containerID="ddf28cdd9db2efa4271303729b6015563fb33d3fd2db404dd3ea9c717ec039d0" Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.322785 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.324525 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.324698 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.325584 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.325708 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e" gracePeriod=600 Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.740991 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e" exitCode=0 Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.741087 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e"} Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.741379 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3"} Dec 15 09:08:57 crc kubenswrapper[4876]: I1215 09:08:57.741404 4876 scope.go:117] "RemoveContainer" containerID="fe64fb92e55239fb7c51d164af6d255ab7c6086373e2d2dd70fd45d2069d31b3" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.661348 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:09:50 crc kubenswrapper[4876]: E1215 09:09:50.663628 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" 
containerName="extract-content" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.663755 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="extract-content" Dec 15 09:09:50 crc kubenswrapper[4876]: E1215 09:09:50.663840 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="extract-utilities" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.663912 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="extract-utilities" Dec 15 09:09:50 crc kubenswrapper[4876]: E1215 09:09:50.664017 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfb2bbb4-3808-4c06-83d4-12f17922a1e8" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.664096 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfb2bbb4-3808-4c06-83d4-12f17922a1e8" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker" Dec 15 09:09:50 crc kubenswrapper[4876]: E1215 09:09:50.664199 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="registry-server" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.664275 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="registry-server" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.664676 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f508be0-2fa4-4b41-a7bd-c0366bac78a5" containerName="registry-server" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.664790 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfb2bbb4-3808-4c06-83d4-12f17922a1e8" containerName="tripleo-cleanup-tripleo-cleanup-openstack-networker" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.681529 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.693217 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.775512 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52fnr\" (UniqueName: \"kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.775608 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.775663 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.878198 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52fnr\" (UniqueName: \"kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.878343 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.878418 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.878960 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.879007 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:50 crc kubenswrapper[4876]: I1215 09:09:50.915681 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-52fnr\" (UniqueName: \"kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr\") pod \"community-operators-pncnp\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:51 crc kubenswrapper[4876]: I1215 09:09:51.035658 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:09:51 crc kubenswrapper[4876]: I1215 09:09:51.599071 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:09:52 crc kubenswrapper[4876]: I1215 09:09:52.277594 4876 generic.go:334] "Generic (PLEG): container finished" podID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerID="d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba" exitCode=0 Dec 15 09:09:52 crc kubenswrapper[4876]: I1215 09:09:52.277641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerDied","Data":"d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba"} Dec 15 09:09:52 crc kubenswrapper[4876]: I1215 09:09:52.277665 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerStarted","Data":"5f9bd629e53ac1e0e9c316f1e3b4c094aeb3f09a922c0aab42a11f42684cfd7e"} Dec 15 09:09:52 crc kubenswrapper[4876]: I1215 09:09:52.280260 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:09:54 crc kubenswrapper[4876]: I1215 09:09:54.298035 4876 generic.go:334] "Generic (PLEG): container finished" podID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerID="f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373" exitCode=0 Dec 15 09:09:54 crc kubenswrapper[4876]: I1215 09:09:54.298122 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerDied","Data":"f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373"} Dec 15 09:09:55 crc kubenswrapper[4876]: I1215 09:09:55.307347 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerStarted","Data":"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5"} Dec 15 09:09:55 crc kubenswrapper[4876]: I1215 09:09:55.325166 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pncnp" podStartSLOduration=2.599584108 podStartE2EDuration="5.325148511s" podCreationTimestamp="2025-12-15 09:09:50 +0000 UTC" firstStartedPulling="2025-12-15 09:09:52.280026848 +0000 UTC m=+8317.851169759" lastFinishedPulling="2025-12-15 09:09:55.005591251 +0000 UTC m=+8320.576734162" observedRunningTime="2025-12-15 09:09:55.323612199 +0000 UTC m=+8320.894755120" watchObservedRunningTime="2025-12-15 09:09:55.325148511 +0000 UTC m=+8320.896291442" Dec 15 09:10:01 crc kubenswrapper[4876]: I1215 09:10:01.036480 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:01 crc kubenswrapper[4876]: I1215 09:10:01.037462 4876 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:01 crc kubenswrapper[4876]: I1215 09:10:01.085424 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:01 crc kubenswrapper[4876]: I1215 09:10:01.421156 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:01 crc kubenswrapper[4876]: I1215 09:10:01.466986 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:10:03 crc kubenswrapper[4876]: I1215 09:10:03.380552 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pncnp" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="registry-server" containerID="cri-o://bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5" gracePeriod=2 Dec 15 09:10:03 crc kubenswrapper[4876]: I1215 09:10:03.995649 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.066225 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-qzrjp"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.074073 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-3647-account-create-update-vw4kb"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.082876 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-3647-account-create-update-vw4kb"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.090905 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-qzrjp"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.144190 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities\") pod \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.144365 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content\") pod \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.144554 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52fnr\" (UniqueName: \"kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr\") pod \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\" (UID: \"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc\") " Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.144947 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities" (OuterVolumeSpecName: "utilities") pod "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" (UID: "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.145041 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.159405 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr" (OuterVolumeSpecName: "kube-api-access-52fnr") pod "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" (UID: "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc"). InnerVolumeSpecName "kube-api-access-52fnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.199229 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" (UID: "365c08e7-1bba-4bb4-8ff2-d528eef4a2dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.247517 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.247552 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52fnr\" (UniqueName: \"kubernetes.io/projected/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc-kube-api-access-52fnr\") on node \"crc\" DevicePath \"\"" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.391481 4876 generic.go:334] "Generic (PLEG): container finished" podID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerID="bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5" exitCode=0 Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.391543 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerDied","Data":"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5"} Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.391589 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pncnp" event={"ID":"365c08e7-1bba-4bb4-8ff2-d528eef4a2dc","Type":"ContainerDied","Data":"5f9bd629e53ac1e0e9c316f1e3b4c094aeb3f09a922c0aab42a11f42684cfd7e"} Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.391616 4876 scope.go:117] "RemoveContainer" containerID="bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.391824 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pncnp" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.431244 4876 scope.go:117] "RemoveContainer" containerID="f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.443719 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.451918 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pncnp"] Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.479904 4876 scope.go:117] "RemoveContainer" containerID="d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.503854 4876 scope.go:117] "RemoveContainer" containerID="bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5" Dec 15 09:10:04 crc kubenswrapper[4876]: E1215 09:10:04.504252 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5\": container with ID starting with bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5 not found: ID does not exist" containerID="bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.504288 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5"} err="failed to get container status \"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5\": rpc error: code = NotFound desc = could not find container \"bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5\": container with ID starting with bdec25305ab940f53d9f441cadd3edddceccd91a4a973d026798647063768ac5 not found: ID does not exist" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.504310 4876 scope.go:117] "RemoveContainer" containerID="f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373" Dec 15 09:10:04 crc kubenswrapper[4876]: E1215 09:10:04.504669 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373\": container with ID starting with f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373 not found: ID does not exist" containerID="f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.504694 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373"} err="failed to get container status \"f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373\": rpc error: code = NotFound desc = could not find container \"f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373\": container with ID starting with f62ce1dd2cfcea527b72d4f8793285996627efc32338efe3307b3a36ee855373 not found: ID does not exist" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.504708 4876 scope.go:117] "RemoveContainer" containerID="d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba" Dec 15 09:10:04 crc kubenswrapper[4876]: E1215 09:10:04.505002 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba\": container with ID starting with d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba not found: ID does not exist" containerID="d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.505027 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba"} err="failed to get container status \"d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba\": rpc error: code = NotFound desc = could not find container \"d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba\": container with ID starting with d0d2f5871f2aca2b79673f47ec595b8f6c6a4870aa44c45985626b97c9ac8bba not found: ID does not exist" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.717090 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" path="/var/lib/kubelet/pods/365c08e7-1bba-4bb4-8ff2-d528eef4a2dc/volumes" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.717954 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b68576a6-b067-44ef-8322-a9c902ca5a86" path="/var/lib/kubelet/pods/b68576a6-b067-44ef-8322-a9c902ca5a86/volumes" Dec 15 09:10:04 crc kubenswrapper[4876]: I1215 09:10:04.718620 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbd6be73-ea8e-44e0-a951-970e42acf3cf" path="/var/lib/kubelet/pods/bbd6be73-ea8e-44e0-a951-970e42acf3cf/volumes" Dec 15 09:10:14 crc kubenswrapper[4876]: I1215 09:10:14.042747 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-l2g6k"] Dec 15 09:10:14 crc kubenswrapper[4876]: I1215 09:10:14.052875 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-l2g6k"] Dec 15 09:10:14 crc kubenswrapper[4876]: I1215 09:10:14.728003 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d98f4901-6ef5-4f21-a8fc-6830229131c3" path="/var/lib/kubelet/pods/d98f4901-6ef5-4f21-a8fc-6830229131c3/volumes" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.053936 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-n9md8"] Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.067410 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-n9md8"] Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.081709 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-683e-account-create-update-tw7lf"] Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.093317 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-683e-account-create-update-tw7lf"] Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.717270 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28b4882c-bb24-406f-bf0f-9d788a13c40f" path="/var/lib/kubelet/pods/28b4882c-bb24-406f-bf0f-9d788a13c40f/volumes" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.717879 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c93649-bf5e-4e88-8f80-5cb4f62d2b19" path="/var/lib/kubelet/pods/91c93649-bf5e-4e88-8f80-5cb4f62d2b19/volumes" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.729064 4876 scope.go:117] "RemoveContainer" 
containerID="b57785ec56b8398e128a1255d1f03207676a8dc4a586fe66bfca9221cf751ed7" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.751701 4876 scope.go:117] "RemoveContainer" containerID="7070b3d3f2182ae72fbee70905b42763a2386161e4f93216e83ada18fb1fb34e" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.807169 4876 scope.go:117] "RemoveContainer" containerID="8fd9be45165d412267a47276082eb7185c64df2b2cca136d546c892a348f129f" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.859372 4876 scope.go:117] "RemoveContainer" containerID="03bd51b219b12a040b629a8cc59c12ee340153e8e2170b15b6b1f1a043ea937e" Dec 15 09:10:30 crc kubenswrapper[4876]: I1215 09:10:30.915602 4876 scope.go:117] "RemoveContainer" containerID="52720c96b580c90f6a945b4af3a957279e04a3c968e1d8b7b7a5b737f2bc29b9" Dec 15 09:10:43 crc kubenswrapper[4876]: I1215 09:10:43.044708 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-dqgdr"] Dec 15 09:10:43 crc kubenswrapper[4876]: I1215 09:10:43.055564 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-dqgdr"] Dec 15 09:10:44 crc kubenswrapper[4876]: I1215 09:10:44.742843 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d977fdb-c84c-4ec6-aae2-f97f005520ca" path="/var/lib/kubelet/pods/9d977fdb-c84c-4ec6-aae2-f97f005520ca/volumes" Dec 15 09:10:57 crc kubenswrapper[4876]: I1215 09:10:57.322750 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:10:57 crc kubenswrapper[4876]: I1215 09:10:57.324036 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:11:27 crc kubenswrapper[4876]: I1215 09:11:27.323007 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:11:27 crc kubenswrapper[4876]: I1215 09:11:27.323595 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:11:31 crc kubenswrapper[4876]: I1215 09:11:31.082447 4876 scope.go:117] "RemoveContainer" containerID="a60c9251fa3647ef7cefc1c85ce1c9cb6dc1d33c0bb8ed4e3363a1f6ffebe9fd" Dec 15 09:11:42 crc kubenswrapper[4876]: I1215 09:11:42.329473 4876 generic.go:334] "Generic (PLEG): container finished" podID="68234414-ac2d-447e-a5c5-4608dc77b5ed" containerID="da3ababd59c63baf0c9c15396d3eef2ae769e9e1a885f3a875a94481c9636f39" exitCode=0 Dec 15 09:11:42 crc kubenswrapper[4876]: I1215 09:11:42.329659 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" 
event={"ID":"68234414-ac2d-447e-a5c5-4608dc77b5ed","Type":"ContainerDied","Data":"da3ababd59c63baf0c9c15396d3eef2ae769e9e1a885f3a875a94481c9636f39"} Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.811854 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.894343 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory\") pod \"68234414-ac2d-447e-a5c5-4608dc77b5ed\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.894427 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle\") pod \"68234414-ac2d-447e-a5c5-4608dc77b5ed\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.894563 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbq59\" (UniqueName: \"kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59\") pod \"68234414-ac2d-447e-a5c5-4608dc77b5ed\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.894647 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key\") pod \"68234414-ac2d-447e-a5c5-4608dc77b5ed\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.894713 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph\") pod \"68234414-ac2d-447e-a5c5-4608dc77b5ed\" (UID: \"68234414-ac2d-447e-a5c5-4608dc77b5ed\") " Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.902219 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph" (OuterVolumeSpecName: "ceph") pod "68234414-ac2d-447e-a5c5-4608dc77b5ed" (UID: "68234414-ac2d-447e-a5c5-4608dc77b5ed"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.905379 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59" (OuterVolumeSpecName: "kube-api-access-lbq59") pod "68234414-ac2d-447e-a5c5-4608dc77b5ed" (UID: "68234414-ac2d-447e-a5c5-4608dc77b5ed"). InnerVolumeSpecName "kube-api-access-lbq59". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.914903 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "68234414-ac2d-447e-a5c5-4608dc77b5ed" (UID: "68234414-ac2d-447e-a5c5-4608dc77b5ed"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.945204 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory" (OuterVolumeSpecName: "inventory") pod "68234414-ac2d-447e-a5c5-4608dc77b5ed" (UID: "68234414-ac2d-447e-a5c5-4608dc77b5ed"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.965836 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "68234414-ac2d-447e-a5c5-4608dc77b5ed" (UID: "68234414-ac2d-447e-a5c5-4608dc77b5ed"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.996555 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbq59\" (UniqueName: \"kubernetes.io/projected/68234414-ac2d-447e-a5c5-4608dc77b5ed-kube-api-access-lbq59\") on node \"crc\" DevicePath \"\"" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.996606 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.996619 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.996630 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:11:43 crc kubenswrapper[4876]: I1215 09:11:43.996641 4876 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68234414-ac2d-447e-a5c5-4608dc77b5ed-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:11:44 crc kubenswrapper[4876]: I1215 09:11:44.351680 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" event={"ID":"68234414-ac2d-447e-a5c5-4608dc77b5ed","Type":"ContainerDied","Data":"41a1c89fc5d334372ba95498ecb25254953b50cc636f81bc37b08ccfee6abacb"} Dec 15 09:11:44 crc kubenswrapper[4876]: I1215 09:11:44.352004 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41a1c89fc5d334372ba95498ecb25254953b50cc636f81bc37b08ccfee6abacb" Dec 15 09:11:44 crc kubenswrapper[4876]: I1215 09:11:44.351750 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.539670 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-m2xj6"] Dec 15 09:11:46 crc kubenswrapper[4876]: E1215 09:11:46.540537 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="extract-utilities" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540558 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="extract-utilities" Dec 15 09:11:46 crc kubenswrapper[4876]: E1215 09:11:46.540576 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="extract-content" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540584 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="extract-content" Dec 15 09:11:46 crc kubenswrapper[4876]: E1215 09:11:46.540612 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68234414-ac2d-447e-a5c5-4608dc77b5ed" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540623 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="68234414-ac2d-447e-a5c5-4608dc77b5ed" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 15 09:11:46 crc kubenswrapper[4876]: E1215 09:11:46.540639 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="registry-server" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540647 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="registry-server" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540909 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="68234414-ac2d-447e-a5c5-4608dc77b5ed" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.540930 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="365c08e7-1bba-4bb4-8ff2-d528eef4a2dc" containerName="registry-server" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.541994 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.546754 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.546968 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.547149 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.548851 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.550222 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-m2xj6"] Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.609311 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-wjdpb"] Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.610839 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.612944 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.612989 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.625781 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-wjdpb"] Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.647375 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.647676 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.647768 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.647956 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " 
pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.648010 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjj8b\" (UniqueName: \"kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.648070 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztlgl\" (UniqueName: \"kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.648125 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.648253 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.648427 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750341 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750396 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjj8b\" (UniqueName: \"kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750439 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztlgl\" (UniqueName: \"kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc 
kubenswrapper[4876]: I1215 09:11:46.750472 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750566 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750709 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750734 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750764 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.750788 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.757080 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.757093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.757503 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" 
(UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.758957 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.760589 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.763745 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.764460 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.768896 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjj8b\" (UniqueName: \"kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b\") pod \"bootstrap-openstack-openstack-cell1-m2xj6\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.769265 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztlgl\" (UniqueName: \"kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl\") pod \"bootstrap-openstack-openstack-networker-wjdpb\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.872670 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:11:46 crc kubenswrapper[4876]: I1215 09:11:46.929957 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:11:47 crc kubenswrapper[4876]: I1215 09:11:47.418898 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-m2xj6"] Dec 15 09:11:47 crc kubenswrapper[4876]: I1215 09:11:47.578834 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-networker-wjdpb"] Dec 15 09:11:47 crc kubenswrapper[4876]: W1215 09:11:47.862897 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod318cc9be_d461_48c7_83ee_f2a3cfe88b08.slice/crio-a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6 WatchSource:0}: Error finding container a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6: Status 404 returned error can't find the container with id a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6 Dec 15 09:11:48 crc kubenswrapper[4876]: I1215 09:11:48.408040 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" event={"ID":"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2","Type":"ContainerStarted","Data":"b356ceba881103bdf3b1a1e3e0b5ff3cb153574b86cc05521fdd764b30116970"} Dec 15 09:11:48 crc kubenswrapper[4876]: I1215 09:11:48.410796 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" event={"ID":"318cc9be-d461-48c7-83ee-f2a3cfe88b08","Type":"ContainerStarted","Data":"a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6"} Dec 15 09:11:49 crc kubenswrapper[4876]: I1215 09:11:49.420753 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" event={"ID":"318cc9be-d461-48c7-83ee-f2a3cfe88b08","Type":"ContainerStarted","Data":"9ab8998d3acfa1f8b21ec44c0c9a8d504228a790f33b2f5a997a876a1f118553"} Dec 15 09:11:49 crc kubenswrapper[4876]: I1215 09:11:49.424019 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" event={"ID":"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2","Type":"ContainerStarted","Data":"52df9deb5c33b0096b5dbc715eca61830fb94376e08ddac17cc6997a01a26cd2"} Dec 15 09:11:49 crc kubenswrapper[4876]: I1215 09:11:49.446897 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" podStartSLOduration=3.023358966 podStartE2EDuration="3.446876424s" podCreationTimestamp="2025-12-15 09:11:46 +0000 UTC" firstStartedPulling="2025-12-15 09:11:47.868393113 +0000 UTC m=+8433.439536024" lastFinishedPulling="2025-12-15 09:11:48.291910571 +0000 UTC m=+8433.863053482" observedRunningTime="2025-12-15 09:11:49.444021127 +0000 UTC m=+8435.015164048" watchObservedRunningTime="2025-12-15 09:11:49.446876424 +0000 UTC m=+8435.018019345" Dec 15 09:11:49 crc kubenswrapper[4876]: I1215 09:11:49.466055 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" podStartSLOduration=2.915508194 podStartE2EDuration="3.46602834s" podCreationTimestamp="2025-12-15 09:11:46 +0000 UTC" firstStartedPulling="2025-12-15 09:11:47.863007398 +0000 UTC m=+8433.434150329" lastFinishedPulling="2025-12-15 09:11:48.413527564 +0000 UTC m=+8433.984670475" observedRunningTime="2025-12-15 09:11:49.456290498 +0000 UTC m=+8435.027433429" watchObservedRunningTime="2025-12-15 
09:11:49.46602834 +0000 UTC m=+8435.037171261" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.322255 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.322753 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.322792 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.323544 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.323809 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" gracePeriod=600 Dec 15 09:11:57 crc kubenswrapper[4876]: E1215 09:11:57.451970 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.493854 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" exitCode=0 Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.493897 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3"} Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.493928 4876 scope.go:117] "RemoveContainer" containerID="ef61df80dc5aaaee61bd2f22ab0be6b3a54ac7a89c0fc277e3ecfb09d65b986e" Dec 15 09:11:57 crc kubenswrapper[4876]: I1215 09:11:57.495566 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:11:57 crc kubenswrapper[4876]: E1215 09:11:57.495830 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:12:10 crc kubenswrapper[4876]: I1215 09:12:10.706254 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:12:10 crc kubenswrapper[4876]: E1215 09:12:10.707237 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:12:22 crc kubenswrapper[4876]: I1215 09:12:22.706332 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:12:22 crc kubenswrapper[4876]: E1215 09:12:22.707580 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.452726 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.456776 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.480806 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.535862 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.535934 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8mp4\" (UniqueName: \"kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.536496 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.638914 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.639018 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.639072 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8mp4\" (UniqueName: \"kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.639549 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.639605 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.660752 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-w8mp4\" (UniqueName: \"kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4\") pod \"redhat-marketplace-d227h\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:29 crc kubenswrapper[4876]: I1215 09:12:29.783678 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:30 crc kubenswrapper[4876]: I1215 09:12:30.242472 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:30 crc kubenswrapper[4876]: I1215 09:12:30.876435 4876 generic.go:334] "Generic (PLEG): container finished" podID="3632d407-06a8-4a2c-af93-fff00386873e" containerID="51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b" exitCode=0 Dec 15 09:12:30 crc kubenswrapper[4876]: I1215 09:12:30.876738 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerDied","Data":"51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b"} Dec 15 09:12:30 crc kubenswrapper[4876]: I1215 09:12:30.876796 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerStarted","Data":"0f100a8754539bca66201759d54421880f563631f5c6c073ca55f6fe94119811"} Dec 15 09:12:32 crc kubenswrapper[4876]: I1215 09:12:32.901833 4876 generic.go:334] "Generic (PLEG): container finished" podID="3632d407-06a8-4a2c-af93-fff00386873e" containerID="981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5" exitCode=0 Dec 15 09:12:32 crc kubenswrapper[4876]: I1215 09:12:32.901875 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerDied","Data":"981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5"} Dec 15 09:12:33 crc kubenswrapper[4876]: I1215 09:12:33.912742 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerStarted","Data":"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8"} Dec 15 09:12:33 crc kubenswrapper[4876]: I1215 09:12:33.940616 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d227h" podStartSLOduration=2.366286732 podStartE2EDuration="4.940600023s" podCreationTimestamp="2025-12-15 09:12:29 +0000 UTC" firstStartedPulling="2025-12-15 09:12:30.87799076 +0000 UTC m=+8476.449133671" lastFinishedPulling="2025-12-15 09:12:33.452304051 +0000 UTC m=+8479.023446962" observedRunningTime="2025-12-15 09:12:33.927839139 +0000 UTC m=+8479.498982060" watchObservedRunningTime="2025-12-15 09:12:33.940600023 +0000 UTC m=+8479.511742954" Dec 15 09:12:36 crc kubenswrapper[4876]: I1215 09:12:36.705821 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:12:36 crc kubenswrapper[4876]: E1215 09:12:36.706466 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:12:39 crc kubenswrapper[4876]: I1215 09:12:39.784761 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:39 crc kubenswrapper[4876]: I1215 09:12:39.786031 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:39 crc kubenswrapper[4876]: I1215 09:12:39.839420 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:40 crc kubenswrapper[4876]: I1215 09:12:40.028655 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:40 crc kubenswrapper[4876]: I1215 09:12:40.082814 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:41 crc kubenswrapper[4876]: I1215 09:12:41.994451 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d227h" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="registry-server" containerID="cri-o://22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8" gracePeriod=2 Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.544765 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.640974 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content\") pod \"3632d407-06a8-4a2c-af93-fff00386873e\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.641137 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8mp4\" (UniqueName: \"kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4\") pod \"3632d407-06a8-4a2c-af93-fff00386873e\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.641187 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities\") pod \"3632d407-06a8-4a2c-af93-fff00386873e\" (UID: \"3632d407-06a8-4a2c-af93-fff00386873e\") " Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.643446 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities" (OuterVolumeSpecName: "utilities") pod "3632d407-06a8-4a2c-af93-fff00386873e" (UID: "3632d407-06a8-4a2c-af93-fff00386873e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.651578 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4" (OuterVolumeSpecName: "kube-api-access-w8mp4") pod "3632d407-06a8-4a2c-af93-fff00386873e" (UID: "3632d407-06a8-4a2c-af93-fff00386873e"). InnerVolumeSpecName "kube-api-access-w8mp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.667261 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3632d407-06a8-4a2c-af93-fff00386873e" (UID: "3632d407-06a8-4a2c-af93-fff00386873e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.744197 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.744236 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8mp4\" (UniqueName: \"kubernetes.io/projected/3632d407-06a8-4a2c-af93-fff00386873e-kube-api-access-w8mp4\") on node \"crc\" DevicePath \"\"" Dec 15 09:12:42 crc kubenswrapper[4876]: I1215 09:12:42.744246 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3632d407-06a8-4a2c-af93-fff00386873e-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.004650 4876 generic.go:334] "Generic (PLEG): container finished" podID="3632d407-06a8-4a2c-af93-fff00386873e" containerID="22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8" exitCode=0 Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.004709 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerDied","Data":"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8"} Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.004719 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d227h" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.004740 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d227h" event={"ID":"3632d407-06a8-4a2c-af93-fff00386873e","Type":"ContainerDied","Data":"0f100a8754539bca66201759d54421880f563631f5c6c073ca55f6fe94119811"} Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.004757 4876 scope.go:117] "RemoveContainer" containerID="22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.028242 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.031120 4876 scope.go:117] "RemoveContainer" containerID="981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.038007 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d227h"] Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.055998 4876 scope.go:117] "RemoveContainer" containerID="51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.094121 4876 scope.go:117] "RemoveContainer" containerID="22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8" Dec 15 09:12:43 crc kubenswrapper[4876]: E1215 09:12:43.094780 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8\": container with ID starting with 22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8 not found: ID does not exist" containerID="22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.094872 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8"} err="failed to get container status \"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8\": rpc error: code = NotFound desc = could not find container \"22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8\": container with ID starting with 22e587b7da19bc9d8f0cc55facea2118f477a006d40d16cac1c1398d251bdee8 not found: ID does not exist" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.094906 4876 scope.go:117] "RemoveContainer" containerID="981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5" Dec 15 09:12:43 crc kubenswrapper[4876]: E1215 09:12:43.095400 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5\": container with ID starting with 981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5 not found: ID does not exist" containerID="981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.095434 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5"} err="failed to get container status \"981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5\": rpc error: code = NotFound desc = could not find 
container \"981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5\": container with ID starting with 981d008068bbf0f96f3173266ed662ff8a698a465278a96388c111d5f7737bc5 not found: ID does not exist" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.095493 4876 scope.go:117] "RemoveContainer" containerID="51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b" Dec 15 09:12:43 crc kubenswrapper[4876]: E1215 09:12:43.095865 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b\": container with ID starting with 51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b not found: ID does not exist" containerID="51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b" Dec 15 09:12:43 crc kubenswrapper[4876]: I1215 09:12:43.095906 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b"} err="failed to get container status \"51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b\": rpc error: code = NotFound desc = could not find container \"51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b\": container with ID starting with 51233fc455fc5628f0a2e585100516c8f7360b4cb76539b57c6a064b8237e15b not found: ID does not exist" Dec 15 09:12:44 crc kubenswrapper[4876]: I1215 09:12:44.725296 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3632d407-06a8-4a2c-af93-fff00386873e" path="/var/lib/kubelet/pods/3632d407-06a8-4a2c-af93-fff00386873e/volumes" Dec 15 09:12:49 crc kubenswrapper[4876]: I1215 09:12:49.705637 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:12:49 crc kubenswrapper[4876]: E1215 09:12:49.706514 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:13:03 crc kubenswrapper[4876]: I1215 09:13:03.706255 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:13:03 crc kubenswrapper[4876]: E1215 09:13:03.707144 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:13:17 crc kubenswrapper[4876]: I1215 09:13:17.705875 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:13:17 crc kubenswrapper[4876]: E1215 09:13:17.707394 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:13:30 crc kubenswrapper[4876]: I1215 09:13:30.705790 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:13:30 crc kubenswrapper[4876]: E1215 09:13:30.706734 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:13:44 crc kubenswrapper[4876]: I1215 09:13:44.714518 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:13:44 crc kubenswrapper[4876]: E1215 09:13:44.715604 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:13:56 crc kubenswrapper[4876]: I1215 09:13:56.705704 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:13:56 crc kubenswrapper[4876]: E1215 09:13:56.706501 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:08 crc kubenswrapper[4876]: I1215 09:14:08.706185 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:14:08 crc kubenswrapper[4876]: E1215 09:14:08.706987 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:20 crc kubenswrapper[4876]: I1215 09:14:20.706816 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:14:20 crc kubenswrapper[4876]: E1215 09:14:20.708721 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:32 crc kubenswrapper[4876]: I1215 09:14:32.706420 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:14:32 crc kubenswrapper[4876]: E1215 09:14:32.707299 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:47 crc kubenswrapper[4876]: I1215 09:14:47.705220 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:14:47 crc kubenswrapper[4876]: E1215 09:14:47.706012 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:50 crc kubenswrapper[4876]: I1215 09:14:50.112878 4876 generic.go:334] "Generic (PLEG): container finished" podID="c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" containerID="52df9deb5c33b0096b5dbc715eca61830fb94376e08ddac17cc6997a01a26cd2" exitCode=0 Dec 15 09:14:50 crc kubenswrapper[4876]: I1215 09:14:50.112994 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" event={"ID":"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2","Type":"ContainerDied","Data":"52df9deb5c33b0096b5dbc715eca61830fb94376e08ddac17cc6997a01a26cd2"} Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.369247 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:14:51 crc kubenswrapper[4876]: E1215 09:14:51.371387 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="registry-server" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.371430 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="registry-server" Dec 15 09:14:51 crc kubenswrapper[4876]: E1215 09:14:51.371467 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="extract-utilities" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.371477 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="extract-utilities" Dec 15 09:14:51 crc kubenswrapper[4876]: E1215 09:14:51.371504 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="extract-content" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.371512 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="extract-content" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.371737 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3632d407-06a8-4a2c-af93-fff00386873e" containerName="registry-server" Dec 15 09:14:51 
crc kubenswrapper[4876]: I1215 09:14:51.378051 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.379924 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.486082 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wn65\" (UniqueName: \"kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.486281 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.486323 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.527785 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.587949 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory\") pod \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588136 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjj8b\" (UniqueName: \"kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b\") pod \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588192 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph\") pod \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588317 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key\") pod \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") " Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588396 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle\") pod \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\" (UID: \"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2\") 
" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588712 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.588809 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wn65\" (UniqueName: \"kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.591176 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.595073 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.598728 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b" (OuterVolumeSpecName: "kube-api-access-fjj8b") pod "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" (UID: "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2"). InnerVolumeSpecName "kube-api-access-fjj8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.604193 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" (UID: "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.605946 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wn65\" (UniqueName: \"kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65\") pod \"certified-operators-vflnr\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.624249 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph" (OuterVolumeSpecName: "ceph") pod "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" (UID: "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.633119 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" (UID: "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.657297 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory" (OuterVolumeSpecName: "inventory") pod "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" (UID: "c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.690692 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjj8b\" (UniqueName: \"kubernetes.io/projected/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-kube-api-access-fjj8b\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.690728 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.690739 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.690750 4876 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.690758 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:51 crc kubenswrapper[4876]: I1215 09:14:51.704438 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.137636 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" event={"ID":"c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2","Type":"ContainerDied","Data":"b356ceba881103bdf3b1a1e3e0b5ff3cb153574b86cc05521fdd764b30116970"} Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.137944 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b356ceba881103bdf3b1a1e3e0b5ff3cb153574b86cc05521fdd764b30116970" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.137686 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-m2xj6" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.238633 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-799z9"] Dec 15 09:14:52 crc kubenswrapper[4876]: E1215 09:14:52.239376 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" containerName="bootstrap-openstack-openstack-cell1" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.239393 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" containerName="bootstrap-openstack-openstack-cell1" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.239680 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2" containerName="bootstrap-openstack-openstack-cell1" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.240582 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.243596 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.243988 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.262245 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.279206 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-799z9"] Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.303860 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.303916 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.303962 4876 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.304048 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzt4g\" (UniqueName: \"kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.405403 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzt4g\" (UniqueName: \"kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.405592 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.405634 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.405662 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.411531 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.414218 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.415003 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: 
\"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.425510 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzt4g\" (UniqueName: \"kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g\") pod \"download-cache-openstack-openstack-cell1-799z9\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:52 crc kubenswrapper[4876]: I1215 09:14:52.564927 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.134719 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-799z9"] Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.142161 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.152580 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-799z9" event={"ID":"ca266618-8aff-4574-8a86-d987f671b431","Type":"ContainerStarted","Data":"892b79bfc9bebc4eb5c6d63128eca2888a0bd10687d03af9b57f7d77ec4b8055"} Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.155360 4876 generic.go:334] "Generic (PLEG): container finished" podID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerID="dfea9cb428fc6dbbfc9233d5f7fbdeb7184285a3dfb12593a4c3158ffa936227" exitCode=0 Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.155430 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerDied","Data":"dfea9cb428fc6dbbfc9233d5f7fbdeb7184285a3dfb12593a4c3158ffa936227"} Dec 15 09:14:53 crc kubenswrapper[4876]: I1215 09:14:53.155732 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerStarted","Data":"dca7350f3c5f6e0e02eff5bf272199842467cd98c1041ad8252e7517bccc4e47"} Dec 15 09:14:55 crc kubenswrapper[4876]: I1215 09:14:55.173737 4876 generic.go:334] "Generic (PLEG): container finished" podID="318cc9be-d461-48c7-83ee-f2a3cfe88b08" containerID="9ab8998d3acfa1f8b21ec44c0c9a8d504228a790f33b2f5a997a876a1f118553" exitCode=0 Dec 15 09:14:55 crc kubenswrapper[4876]: I1215 09:14:55.173781 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" event={"ID":"318cc9be-d461-48c7-83ee-f2a3cfe88b08","Type":"ContainerDied","Data":"9ab8998d3acfa1f8b21ec44c0c9a8d504228a790f33b2f5a997a876a1f118553"} Dec 15 09:14:56 crc kubenswrapper[4876]: I1215 09:14:56.190829 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-799z9" event={"ID":"ca266618-8aff-4574-8a86-d987f671b431","Type":"ContainerStarted","Data":"e4d2cdb4e49810481b46e0fca4ba2afb04b3535cde4639d45ce6f87df4c81cc3"} Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.087431 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.116667 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-799z9" podStartSLOduration=4.203756834 podStartE2EDuration="6.116646428s" podCreationTimestamp="2025-12-15 09:14:52 +0000 UTC" firstStartedPulling="2025-12-15 09:14:53.141784874 +0000 UTC m=+8618.712927785" lastFinishedPulling="2025-12-15 09:14:55.054674468 +0000 UTC m=+8620.625817379" observedRunningTime="2025-12-15 09:14:56.219574599 +0000 UTC m=+8621.790717520" watchObservedRunningTime="2025-12-15 09:14:58.116646428 +0000 UTC m=+8623.687789339" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.213256 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" event={"ID":"318cc9be-d461-48c7-83ee-f2a3cfe88b08","Type":"ContainerDied","Data":"a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6"} Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.213296 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a10612550e7292418cdcd6e21096c7c879e63335301cfaf5497f4ed5b40766e6" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.213312 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-networker-wjdpb" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.240950 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztlgl\" (UniqueName: \"kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl\") pod \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.241149 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle\") pod \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.241221 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory\") pod \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.241245 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key\") pod \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\" (UID: \"318cc9be-d461-48c7-83ee-f2a3cfe88b08\") " Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.249301 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl" (OuterVolumeSpecName: "kube-api-access-ztlgl") pod "318cc9be-d461-48c7-83ee-f2a3cfe88b08" (UID: "318cc9be-d461-48c7-83ee-f2a3cfe88b08"). InnerVolumeSpecName "kube-api-access-ztlgl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.249459 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "318cc9be-d461-48c7-83ee-f2a3cfe88b08" (UID: "318cc9be-d461-48c7-83ee-f2a3cfe88b08"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.282227 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "318cc9be-d461-48c7-83ee-f2a3cfe88b08" (UID: "318cc9be-d461-48c7-83ee-f2a3cfe88b08"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.298162 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory" (OuterVolumeSpecName: "inventory") pod "318cc9be-d461-48c7-83ee-f2a3cfe88b08" (UID: "318cc9be-d461-48c7-83ee-f2a3cfe88b08"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.344015 4876 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.344399 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.344411 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/318cc9be-d461-48c7-83ee-f2a3cfe88b08-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.344424 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztlgl\" (UniqueName: \"kubernetes.io/projected/318cc9be-d461-48c7-83ee-f2a3cfe88b08-kube-api-access-ztlgl\") on node \"crc\" DevicePath \"\"" Dec 15 09:14:58 crc kubenswrapper[4876]: I1215 09:14:58.707273 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:14:58 crc kubenswrapper[4876]: E1215 09:14:58.707594 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.174745 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-networker-2f7nm"] Dec 15 09:14:59 crc kubenswrapper[4876]: E1215 09:14:59.175237 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318cc9be-d461-48c7-83ee-f2a3cfe88b08" containerName="bootstrap-openstack-openstack-networker" Dec 15 09:14:59 crc 
kubenswrapper[4876]: I1215 09:14:59.175255 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="318cc9be-d461-48c7-83ee-f2a3cfe88b08" containerName="bootstrap-openstack-openstack-networker" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.175546 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="318cc9be-d461-48c7-83ee-f2a3cfe88b08" containerName="bootstrap-openstack-openstack-networker" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.176349 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.178452 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.178879 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.184500 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-networker-2f7nm"] Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.225591 4876 generic.go:334] "Generic (PLEG): container finished" podID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerID="2340663bbba63fda8c94e5ce74aa0415f0c98751348cac36a2dcb5bfec750cd5" exitCode=0 Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.225662 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerDied","Data":"2340663bbba63fda8c94e5ce74aa0415f0c98751348cac36a2dcb5bfec750cd5"} Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.264481 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.264579 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpf5d\" (UniqueName: \"kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.264609 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.367986 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: 
I1215 09:14:59.368200 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpf5d\" (UniqueName: \"kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.368232 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.854278 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpf5d\" (UniqueName: \"kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.854837 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:14:59 crc kubenswrapper[4876]: I1215 09:14:59.854946 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key\") pod \"download-cache-openstack-openstack-networker-2f7nm\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.093607 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.153887 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp"] Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.155901 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.158822 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.160663 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.182091 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp"] Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.240391 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerStarted","Data":"3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67"} Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.270337 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vflnr" podStartSLOduration=2.642824222 podStartE2EDuration="9.270310709s" podCreationTimestamp="2025-12-15 09:14:51 +0000 UTC" firstStartedPulling="2025-12-15 09:14:53.157358592 +0000 UTC m=+8618.728501503" lastFinishedPulling="2025-12-15 09:14:59.784845079 +0000 UTC m=+8625.355987990" observedRunningTime="2025-12-15 09:15:00.258723528 +0000 UTC m=+8625.829866439" watchObservedRunningTime="2025-12-15 09:15:00.270310709 +0000 UTC m=+8625.841453620" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.291358 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4rpn\" (UniqueName: \"kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.291446 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.291593 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.393027 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.397607 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-g4rpn\" (UniqueName: \"kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.397670 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.398695 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.422243 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.435909 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4rpn\" (UniqueName: \"kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn\") pod \"collect-profiles-29429835-jvsjp\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.581903 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:00 crc kubenswrapper[4876]: I1215 09:15:00.740088 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-networker-2f7nm"] Dec 15 09:15:01 crc kubenswrapper[4876]: I1215 09:15:01.054701 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp"] Dec 15 09:15:01 crc kubenswrapper[4876]: I1215 09:15:01.258609 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" event={"ID":"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf","Type":"ContainerStarted","Data":"ad6658037e956f89b8baa3d0d401e18363e135a814dcd63c0aa4ed0ce76bb844"} Dec 15 09:15:01 crc kubenswrapper[4876]: I1215 09:15:01.260190 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" event={"ID":"c04c1272-651f-4964-bd1b-5fae3e479f8e","Type":"ContainerStarted","Data":"1d04b52d8d11a7c3e839fc02e0cec728dcc97e72819d3135ffd8f18ecb3a5325"} Dec 15 09:15:01 crc kubenswrapper[4876]: I1215 09:15:01.705621 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:15:01 crc kubenswrapper[4876]: I1215 09:15:01.705676 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:15:02 crc kubenswrapper[4876]: I1215 09:15:02.269875 4876 generic.go:334] "Generic (PLEG): container finished" podID="c04c1272-651f-4964-bd1b-5fae3e479f8e" containerID="807bb49da6c36f0b0e8e8e7c40318dc01cba9b8b839ac8ff5d8c22d7be167466" exitCode=0 Dec 15 09:15:02 crc kubenswrapper[4876]: I1215 09:15:02.270254 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" event={"ID":"c04c1272-651f-4964-bd1b-5fae3e479f8e","Type":"ContainerDied","Data":"807bb49da6c36f0b0e8e8e7c40318dc01cba9b8b839ac8ff5d8c22d7be167466"} Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.012950 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-vflnr" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" probeResult="failure" output=< Dec 15 09:15:03 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:15:03 crc kubenswrapper[4876]: > Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.281051 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" event={"ID":"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf","Type":"ContainerStarted","Data":"71f5e4fb308f32f3e7c127ae5eab351c9cb3b36033dde70f89acbca89f196153"} Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.307566 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" podStartSLOduration=2.473334946 podStartE2EDuration="4.307545517s" podCreationTimestamp="2025-12-15 09:14:59 +0000 UTC" firstStartedPulling="2025-12-15 09:15:00.758072942 +0000 UTC m=+8626.329215853" lastFinishedPulling="2025-12-15 09:15:02.592283513 +0000 UTC m=+8628.163426424" observedRunningTime="2025-12-15 09:15:03.300636981 +0000 UTC m=+8628.871779892" watchObservedRunningTime="2025-12-15 09:15:03.307545517 +0000 UTC m=+8628.878688438" Dec 15 
09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.720443 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.876864 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume\") pod \"c04c1272-651f-4964-bd1b-5fae3e479f8e\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.877234 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4rpn\" (UniqueName: \"kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn\") pod \"c04c1272-651f-4964-bd1b-5fae3e479f8e\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.878408 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume" (OuterVolumeSpecName: "config-volume") pod "c04c1272-651f-4964-bd1b-5fae3e479f8e" (UID: "c04c1272-651f-4964-bd1b-5fae3e479f8e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.878603 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume\") pod \"c04c1272-651f-4964-bd1b-5fae3e479f8e\" (UID: \"c04c1272-651f-4964-bd1b-5fae3e479f8e\") " Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.879161 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c04c1272-651f-4964-bd1b-5fae3e479f8e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.882950 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c04c1272-651f-4964-bd1b-5fae3e479f8e" (UID: "c04c1272-651f-4964-bd1b-5fae3e479f8e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.895434 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn" (OuterVolumeSpecName: "kube-api-access-g4rpn") pod "c04c1272-651f-4964-bd1b-5fae3e479f8e" (UID: "c04c1272-651f-4964-bd1b-5fae3e479f8e"). InnerVolumeSpecName "kube-api-access-g4rpn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.989068 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4rpn\" (UniqueName: \"kubernetes.io/projected/c04c1272-651f-4964-bd1b-5fae3e479f8e-kube-api-access-g4rpn\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:03 crc kubenswrapper[4876]: I1215 09:15:03.989117 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c04c1272-651f-4964-bd1b-5fae3e479f8e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:04 crc kubenswrapper[4876]: I1215 09:15:04.291558 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" event={"ID":"c04c1272-651f-4964-bd1b-5fae3e479f8e","Type":"ContainerDied","Data":"1d04b52d8d11a7c3e839fc02e0cec728dcc97e72819d3135ffd8f18ecb3a5325"} Dec 15 09:15:04 crc kubenswrapper[4876]: I1215 09:15:04.291938 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d04b52d8d11a7c3e839fc02e0cec728dcc97e72819d3135ffd8f18ecb3a5325" Dec 15 09:15:04 crc kubenswrapper[4876]: I1215 09:15:04.291613 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp" Dec 15 09:15:04 crc kubenswrapper[4876]: I1215 09:15:04.802275 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t"] Dec 15 09:15:04 crc kubenswrapper[4876]: I1215 09:15:04.812338 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429790-nps5t"] Dec 15 09:15:06 crc kubenswrapper[4876]: I1215 09:15:06.718748 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2637c57c-34ea-4a9a-860f-b8cc1b5adec4" path="/var/lib/kubelet/pods/2637c57c-34ea-4a9a-860f-b8cc1b5adec4/volumes" Dec 15 09:15:09 crc kubenswrapper[4876]: I1215 09:15:09.706178 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:15:09 crc kubenswrapper[4876]: E1215 09:15:09.707055 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:15:12 crc kubenswrapper[4876]: I1215 09:15:12.301728 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:15:12 crc kubenswrapper[4876]: I1215 09:15:12.364268 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:15:12 crc kubenswrapper[4876]: I1215 09:15:12.440970 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:15:12 crc kubenswrapper[4876]: I1215 09:15:12.544998 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 09:15:12 crc kubenswrapper[4876]: I1215 09:15:12.545276 4876 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/certified-operators-lcw6w" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="registry-server" containerID="cri-o://22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4" gracePeriod=2 Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.179397 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.315715 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities\") pod \"39dae781-a31a-44d6-aa95-4766741ccf26\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.315791 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnf4r\" (UniqueName: \"kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r\") pod \"39dae781-a31a-44d6-aa95-4766741ccf26\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.315939 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content\") pod \"39dae781-a31a-44d6-aa95-4766741ccf26\" (UID: \"39dae781-a31a-44d6-aa95-4766741ccf26\") " Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.317359 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities" (OuterVolumeSpecName: "utilities") pod "39dae781-a31a-44d6-aa95-4766741ccf26" (UID: "39dae781-a31a-44d6-aa95-4766741ccf26"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.325629 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r" (OuterVolumeSpecName: "kube-api-access-dnf4r") pod "39dae781-a31a-44d6-aa95-4766741ccf26" (UID: "39dae781-a31a-44d6-aa95-4766741ccf26"). InnerVolumeSpecName "kube-api-access-dnf4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.376951 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39dae781-a31a-44d6-aa95-4766741ccf26" (UID: "39dae781-a31a-44d6-aa95-4766741ccf26"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.390801 4876 generic.go:334] "Generic (PLEG): container finished" podID="39dae781-a31a-44d6-aa95-4766741ccf26" containerID="22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4" exitCode=0 Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.391299 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lcw6w" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.391832 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerDied","Data":"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4"} Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.391865 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lcw6w" event={"ID":"39dae781-a31a-44d6-aa95-4766741ccf26","Type":"ContainerDied","Data":"86eb57a5f07487de94788ddde15ad2550c6b243845ee636e9764a5dcff04e2af"} Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.391881 4876 scope.go:117] "RemoveContainer" containerID="22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.418004 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.418046 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnf4r\" (UniqueName: \"kubernetes.io/projected/39dae781-a31a-44d6-aa95-4766741ccf26-kube-api-access-dnf4r\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.418056 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39dae781-a31a-44d6-aa95-4766741ccf26-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.434874 4876 scope.go:117] "RemoveContainer" containerID="0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.455402 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.478729 4876 scope.go:117] "RemoveContainer" containerID="c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.485613 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lcw6w"] Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.546426 4876 scope.go:117] "RemoveContainer" containerID="22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4" Dec 15 09:15:13 crc kubenswrapper[4876]: E1215 09:15:13.546999 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4\": container with ID starting with 22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4 not found: ID does not exist" containerID="22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.547049 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4"} err="failed to get container status \"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4\": rpc error: code = NotFound desc = could not find container \"22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4\": container with ID starting 
with 22fada0f2b69a56a6b04f6173c0c54a26e150d6e150a117873527e0e17995af4 not found: ID does not exist" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.547079 4876 scope.go:117] "RemoveContainer" containerID="0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5" Dec 15 09:15:13 crc kubenswrapper[4876]: E1215 09:15:13.547445 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5\": container with ID starting with 0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5 not found: ID does not exist" containerID="0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.547464 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5"} err="failed to get container status \"0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5\": rpc error: code = NotFound desc = could not find container \"0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5\": container with ID starting with 0628fecdc7faf4682d16737a1a056609706e9381fc2662817b26d8d9d9f50cd5 not found: ID does not exist" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.547477 4876 scope.go:117] "RemoveContainer" containerID="c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4" Dec 15 09:15:13 crc kubenswrapper[4876]: E1215 09:15:13.547759 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4\": container with ID starting with c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4 not found: ID does not exist" containerID="c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4" Dec 15 09:15:13 crc kubenswrapper[4876]: I1215 09:15:13.547819 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4"} err="failed to get container status \"c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4\": rpc error: code = NotFound desc = could not find container \"c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4\": container with ID starting with c9cff0ddcf82828bfec9741fa93817e4e7145ba3dcd954a311b7c5b81966ada4 not found: ID does not exist" Dec 15 09:15:14 crc kubenswrapper[4876]: I1215 09:15:14.727816 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" path="/var/lib/kubelet/pods/39dae781-a31a-44d6-aa95-4766741ccf26/volumes" Dec 15 09:15:21 crc kubenswrapper[4876]: I1215 09:15:21.706174 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:15:21 crc kubenswrapper[4876]: E1215 09:15:21.706837 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:15:31 crc kubenswrapper[4876]: I1215 
09:15:31.224304 4876 scope.go:117] "RemoveContainer" containerID="9e973bff45584da78eb97c336fd6b6f895e5f9f9082691139256f10e86e05345" Dec 15 09:15:33 crc kubenswrapper[4876]: I1215 09:15:33.705910 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:15:33 crc kubenswrapper[4876]: E1215 09:15:33.706764 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:15:47 crc kubenswrapper[4876]: I1215 09:15:47.706059 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:15:47 crc kubenswrapper[4876]: E1215 09:15:47.706934 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:16:00 crc kubenswrapper[4876]: I1215 09:16:00.706193 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:16:00 crc kubenswrapper[4876]: E1215 09:16:00.707076 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:16:12 crc kubenswrapper[4876]: I1215 09:16:12.706030 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:16:12 crc kubenswrapper[4876]: E1215 09:16:12.706940 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:16:12 crc kubenswrapper[4876]: I1215 09:16:12.944577 4876 generic.go:334] "Generic (PLEG): container finished" podID="14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" containerID="71f5e4fb308f32f3e7c127ae5eab351c9cb3b36033dde70f89acbca89f196153" exitCode=0 Dec 15 09:16:12 crc kubenswrapper[4876]: I1215 09:16:12.944635 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" event={"ID":"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf","Type":"ContainerDied","Data":"71f5e4fb308f32f3e7c127ae5eab351c9cb3b36033dde70f89acbca89f196153"} Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.441589 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.535334 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory\") pod \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.535503 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key\") pod \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.535574 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpf5d\" (UniqueName: \"kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d\") pod \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\" (UID: \"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf\") " Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.562431 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d" (OuterVolumeSpecName: "kube-api-access-dpf5d") pod "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" (UID: "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf"). InnerVolumeSpecName "kube-api-access-dpf5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.583643 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" (UID: "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.633803 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory" (OuterVolumeSpecName: "inventory") pod "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" (UID: "14a2c5ff-3623-4e41-9c5e-43f880d1f7bf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.637497 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.637534 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpf5d\" (UniqueName: \"kubernetes.io/projected/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-kube-api-access-dpf5d\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.637546 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/14a2c5ff-3623-4e41-9c5e-43f880d1f7bf-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.963805 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" event={"ID":"14a2c5ff-3623-4e41-9c5e-43f880d1f7bf","Type":"ContainerDied","Data":"ad6658037e956f89b8baa3d0d401e18363e135a814dcd63c0aa4ed0ce76bb844"} Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.964153 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad6658037e956f89b8baa3d0d401e18363e135a814dcd63c0aa4ed0ce76bb844" Dec 15 09:16:14 crc kubenswrapper[4876]: I1215 09:16:14.963875 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-networker-2f7nm" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.051678 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-networker-sjzvd"] Dec 15 09:16:15 crc kubenswrapper[4876]: E1215 09:16:15.052202 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c04c1272-651f-4964-bd1b-5fae3e479f8e" containerName="collect-profiles" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052228 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c04c1272-651f-4964-bd1b-5fae3e479f8e" containerName="collect-profiles" Dec 15 09:16:15 crc kubenswrapper[4876]: E1215 09:16:15.052253 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="registry-server" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052261 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="registry-server" Dec 15 09:16:15 crc kubenswrapper[4876]: E1215 09:16:15.052275 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="extract-content" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052282 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="extract-content" Dec 15 09:16:15 crc kubenswrapper[4876]: E1215 09:16:15.052300 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="extract-utilities" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052307 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="extract-utilities" Dec 15 09:16:15 crc kubenswrapper[4876]: E1215 09:16:15.052316 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" containerName="download-cache-openstack-openstack-networker" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052323 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" containerName="download-cache-openstack-openstack-networker" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052520 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="39dae781-a31a-44d6-aa95-4766741ccf26" containerName="registry-server" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052540 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c04c1272-651f-4964-bd1b-5fae3e479f8e" containerName="collect-profiles" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.052554 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a2c5ff-3623-4e41-9c5e-43f880d1f7bf" containerName="download-cache-openstack-openstack-networker" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.053329 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.056542 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.058498 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.060918 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-networker-sjzvd"] Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.147778 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.147838 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.148064 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.249924 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 
09:16:15.249992 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.250017 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.253746 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.253956 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.265120 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6\") pod \"configure-network-openstack-openstack-networker-sjzvd\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.378550 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.901357 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-networker-sjzvd"] Dec 15 09:16:15 crc kubenswrapper[4876]: I1215 09:16:15.983542 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" event={"ID":"08da1b90-b47e-4073-92df-d3eb34b32ac1","Type":"ContainerStarted","Data":"77a136412e1205d4ed7f090ae632828b1a6574f98b5a3d1aedb274658d354bf3"} Dec 15 09:16:16 crc kubenswrapper[4876]: I1215 09:16:16.996840 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" event={"ID":"08da1b90-b47e-4073-92df-d3eb34b32ac1","Type":"ContainerStarted","Data":"cabb4dbeb1cc56a4df694c261e5f49d91afb70ff71b53d089510506c122df62f"} Dec 15 09:16:17 crc kubenswrapper[4876]: I1215 09:16:17.013878 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" podStartSLOduration=1.527026943 podStartE2EDuration="2.01385521s" podCreationTimestamp="2025-12-15 09:16:15 +0000 UTC" firstStartedPulling="2025-12-15 09:16:15.912729481 +0000 UTC m=+8701.483872392" lastFinishedPulling="2025-12-15 09:16:16.399557748 +0000 UTC m=+8701.970700659" observedRunningTime="2025-12-15 09:16:17.011409244 +0000 UTC m=+8702.582552155" watchObservedRunningTime="2025-12-15 09:16:17.01385521 +0000 UTC m=+8702.584998131" Dec 15 09:16:26 crc kubenswrapper[4876]: I1215 09:16:26.706275 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:16:26 crc kubenswrapper[4876]: E1215 09:16:26.707301 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:16:34 crc kubenswrapper[4876]: I1215 09:16:34.143187 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-799z9" event={"ID":"ca266618-8aff-4574-8a86-d987f671b431","Type":"ContainerDied","Data":"e4d2cdb4e49810481b46e0fca4ba2afb04b3535cde4639d45ce6f87df4c81cc3"} Dec 15 09:16:34 crc kubenswrapper[4876]: I1215 09:16:34.143086 4876 generic.go:334] "Generic (PLEG): container finished" podID="ca266618-8aff-4574-8a86-d987f671b431" containerID="e4d2cdb4e49810481b46e0fca4ba2afb04b3535cde4639d45ce6f87df4c81cc3" exitCode=0 Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.602503 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.695880 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph\") pod \"ca266618-8aff-4574-8a86-d987f671b431\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.696000 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory\") pod \"ca266618-8aff-4574-8a86-d987f671b431\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.696024 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key\") pod \"ca266618-8aff-4574-8a86-d987f671b431\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.696140 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzt4g\" (UniqueName: \"kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g\") pod \"ca266618-8aff-4574-8a86-d987f671b431\" (UID: \"ca266618-8aff-4574-8a86-d987f671b431\") " Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.701813 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g" (OuterVolumeSpecName: "kube-api-access-bzt4g") pod "ca266618-8aff-4574-8a86-d987f671b431" (UID: "ca266618-8aff-4574-8a86-d987f671b431"). InnerVolumeSpecName "kube-api-access-bzt4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.705130 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph" (OuterVolumeSpecName: "ceph") pod "ca266618-8aff-4574-8a86-d987f671b431" (UID: "ca266618-8aff-4574-8a86-d987f671b431"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.728412 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory" (OuterVolumeSpecName: "inventory") pod "ca266618-8aff-4574-8a86-d987f671b431" (UID: "ca266618-8aff-4574-8a86-d987f671b431"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.736983 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca266618-8aff-4574-8a86-d987f671b431" (UID: "ca266618-8aff-4574-8a86-d987f671b431"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.799250 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.799301 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.799314 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzt4g\" (UniqueName: \"kubernetes.io/projected/ca266618-8aff-4574-8a86-d987f671b431-kube-api-access-bzt4g\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:35 crc kubenswrapper[4876]: I1215 09:16:35.799326 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ca266618-8aff-4574-8a86-d987f671b431-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.162992 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-799z9" event={"ID":"ca266618-8aff-4574-8a86-d987f671b431","Type":"ContainerDied","Data":"892b79bfc9bebc4eb5c6d63128eca2888a0bd10687d03af9b57f7d77ec4b8055"} Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.163042 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="892b79bfc9bebc4eb5c6d63128eca2888a0bd10687d03af9b57f7d77ec4b8055" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.163142 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-799z9" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.243718 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hw86n"] Dec 15 09:16:36 crc kubenswrapper[4876]: E1215 09:16:36.244204 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca266618-8aff-4574-8a86-d987f671b431" containerName="download-cache-openstack-openstack-cell1" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.244226 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca266618-8aff-4574-8a86-d987f671b431" containerName="download-cache-openstack-openstack-cell1" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.244511 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca266618-8aff-4574-8a86-d987f671b431" containerName="download-cache-openstack-openstack-cell1" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.247142 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.250443 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.250583 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.261698 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hw86n"] Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.309969 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.310034 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.310253 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.310281 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk5c7\" (UniqueName: \"kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.412448 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.412491 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk5c7\" (UniqueName: \"kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.412553 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hw86n\" 
(UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.412580 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.416543 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.418150 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.420633 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.430228 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk5c7\" (UniqueName: \"kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7\") pod \"configure-network-openstack-openstack-cell1-hw86n\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:36 crc kubenswrapper[4876]: I1215 09:16:36.568490 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:16:37 crc kubenswrapper[4876]: I1215 09:16:37.088359 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-hw86n"] Dec 15 09:16:37 crc kubenswrapper[4876]: I1215 09:16:37.173044 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" event={"ID":"f569b737-56ff-4e5e-af32-807e3a8ab69a","Type":"ContainerStarted","Data":"14aa9bcc7fd1d60bb46df365280c98c24f5a2299d70cb379dc20dd3a678134c1"} Dec 15 09:16:37 crc kubenswrapper[4876]: I1215 09:16:37.706206 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:16:37 crc kubenswrapper[4876]: E1215 09:16:37.706866 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:16:38 crc kubenswrapper[4876]: I1215 09:16:38.185702 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" event={"ID":"f569b737-56ff-4e5e-af32-807e3a8ab69a","Type":"ContainerStarted","Data":"4864f87d60a5dc1b4ff5947b85799c5c604b76e4f6a262adfae3eebc66b54ce6"} Dec 15 09:16:38 crc kubenswrapper[4876]: I1215 09:16:38.224250 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" podStartSLOduration=1.521037521 podStartE2EDuration="2.22422784s" podCreationTimestamp="2025-12-15 09:16:36 +0000 UTC" firstStartedPulling="2025-12-15 09:16:37.103314601 +0000 UTC m=+8722.674457512" lastFinishedPulling="2025-12-15 09:16:37.80650492 +0000 UTC m=+8723.377647831" observedRunningTime="2025-12-15 09:16:38.222078033 +0000 UTC m=+8723.793220954" watchObservedRunningTime="2025-12-15 09:16:38.22422784 +0000 UTC m=+8723.795370781" Dec 15 09:16:51 crc kubenswrapper[4876]: I1215 09:16:51.705948 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:16:51 crc kubenswrapper[4876]: E1215 09:16:51.706684 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:17:04 crc kubenswrapper[4876]: I1215 09:17:04.713727 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:17:05 crc kubenswrapper[4876]: I1215 09:17:05.436275 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15"} Dec 15 09:17:16 crc kubenswrapper[4876]: I1215 09:17:16.533773 4876 generic.go:334] "Generic (PLEG): 
container finished" podID="08da1b90-b47e-4073-92df-d3eb34b32ac1" containerID="cabb4dbeb1cc56a4df694c261e5f49d91afb70ff71b53d089510506c122df62f" exitCode=0 Dec 15 09:17:16 crc kubenswrapper[4876]: I1215 09:17:16.533837 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" event={"ID":"08da1b90-b47e-4073-92df-d3eb34b32ac1","Type":"ContainerDied","Data":"cabb4dbeb1cc56a4df694c261e5f49d91afb70ff71b53d089510506c122df62f"} Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.051694 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.164140 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6\") pod \"08da1b90-b47e-4073-92df-d3eb34b32ac1\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.164251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory\") pod \"08da1b90-b47e-4073-92df-d3eb34b32ac1\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.164375 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key\") pod \"08da1b90-b47e-4073-92df-d3eb34b32ac1\" (UID: \"08da1b90-b47e-4073-92df-d3eb34b32ac1\") " Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.169791 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6" (OuterVolumeSpecName: "kube-api-access-lxnk6") pod "08da1b90-b47e-4073-92df-d3eb34b32ac1" (UID: "08da1b90-b47e-4073-92df-d3eb34b32ac1"). InnerVolumeSpecName "kube-api-access-lxnk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.192594 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "08da1b90-b47e-4073-92df-d3eb34b32ac1" (UID: "08da1b90-b47e-4073-92df-d3eb34b32ac1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.195983 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory" (OuterVolumeSpecName: "inventory") pod "08da1b90-b47e-4073-92df-d3eb34b32ac1" (UID: "08da1b90-b47e-4073-92df-d3eb34b32ac1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.271490 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.271534 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxnk6\" (UniqueName: \"kubernetes.io/projected/08da1b90-b47e-4073-92df-d3eb34b32ac1-kube-api-access-lxnk6\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.271550 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08da1b90-b47e-4073-92df-d3eb34b32ac1-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.557878 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" event={"ID":"08da1b90-b47e-4073-92df-d3eb34b32ac1","Type":"ContainerDied","Data":"77a136412e1205d4ed7f090ae632828b1a6574f98b5a3d1aedb274658d354bf3"} Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.557922 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77a136412e1205d4ed7f090ae632828b1a6574f98b5a3d1aedb274658d354bf3" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.557956 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-networker-sjzvd" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.638060 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-networker-nq5hm"] Dec 15 09:17:18 crc kubenswrapper[4876]: E1215 09:17:18.638656 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08da1b90-b47e-4073-92df-d3eb34b32ac1" containerName="configure-network-openstack-openstack-networker" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.638680 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="08da1b90-b47e-4073-92df-d3eb34b32ac1" containerName="configure-network-openstack-openstack-networker" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.638938 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="08da1b90-b47e-4073-92df-d3eb34b32ac1" containerName="configure-network-openstack-openstack-networker" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.640005 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.642617 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.642788 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.648825 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-networker-nq5hm"] Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.781447 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.781873 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.781980 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlhxb\" (UniqueName: \"kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.884057 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlhxb\" (UniqueName: \"kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.884244 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:18 crc kubenswrapper[4876]: I1215 09:17:18.884360 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:19 crc kubenswrapper[4876]: I1215 09:17:19.257883 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory\") pod 
\"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:19 crc kubenswrapper[4876]: I1215 09:17:19.258467 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:19 crc kubenswrapper[4876]: I1215 09:17:19.258643 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlhxb\" (UniqueName: \"kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb\") pod \"validate-network-openstack-openstack-networker-nq5hm\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:19 crc kubenswrapper[4876]: I1215 09:17:19.558912 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:20 crc kubenswrapper[4876]: I1215 09:17:20.079140 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-networker-nq5hm"] Dec 15 09:17:20 crc kubenswrapper[4876]: I1215 09:17:20.590055 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" event={"ID":"a8a4628a-a631-4db9-bdd5-7040f07f868c","Type":"ContainerStarted","Data":"c5088dadd29e712e038a38f1945108bae6f12d9637ca90998314c480455981dc"} Dec 15 09:17:22 crc kubenswrapper[4876]: I1215 09:17:22.612453 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" event={"ID":"a8a4628a-a631-4db9-bdd5-7040f07f868c","Type":"ContainerStarted","Data":"5c148f4c04bafa34d302c4ca58b8705e3406c3bf0253a725b0b246028c4ec611"} Dec 15 09:17:22 crc kubenswrapper[4876]: I1215 09:17:22.633183 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" podStartSLOduration=3.004121397 podStartE2EDuration="4.633163905s" podCreationTimestamp="2025-12-15 09:17:18 +0000 UTC" firstStartedPulling="2025-12-15 09:17:20.077873016 +0000 UTC m=+8765.649015927" lastFinishedPulling="2025-12-15 09:17:21.706915524 +0000 UTC m=+8767.278058435" observedRunningTime="2025-12-15 09:17:22.626584248 +0000 UTC m=+8768.197727169" watchObservedRunningTime="2025-12-15 09:17:22.633163905 +0000 UTC m=+8768.204306806" Dec 15 09:17:28 crc kubenswrapper[4876]: I1215 09:17:28.674281 4876 generic.go:334] "Generic (PLEG): container finished" podID="a8a4628a-a631-4db9-bdd5-7040f07f868c" containerID="5c148f4c04bafa34d302c4ca58b8705e3406c3bf0253a725b0b246028c4ec611" exitCode=0 Dec 15 09:17:28 crc kubenswrapper[4876]: I1215 09:17:28.674381 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" event={"ID":"a8a4628a-a631-4db9-bdd5-7040f07f868c","Type":"ContainerDied","Data":"5c148f4c04bafa34d302c4ca58b8705e3406c3bf0253a725b0b246028c4ec611"} Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.214916 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.333237 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key\") pod \"a8a4628a-a631-4db9-bdd5-7040f07f868c\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.333294 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory\") pod \"a8a4628a-a631-4db9-bdd5-7040f07f868c\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.333495 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlhxb\" (UniqueName: \"kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb\") pod \"a8a4628a-a631-4db9-bdd5-7040f07f868c\" (UID: \"a8a4628a-a631-4db9-bdd5-7040f07f868c\") " Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.342296 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb" (OuterVolumeSpecName: "kube-api-access-wlhxb") pod "a8a4628a-a631-4db9-bdd5-7040f07f868c" (UID: "a8a4628a-a631-4db9-bdd5-7040f07f868c"). InnerVolumeSpecName "kube-api-access-wlhxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.365483 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a8a4628a-a631-4db9-bdd5-7040f07f868c" (UID: "a8a4628a-a631-4db9-bdd5-7040f07f868c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.371924 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory" (OuterVolumeSpecName: "inventory") pod "a8a4628a-a631-4db9-bdd5-7040f07f868c" (UID: "a8a4628a-a631-4db9-bdd5-7040f07f868c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.437646 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlhxb\" (UniqueName: \"kubernetes.io/projected/a8a4628a-a631-4db9-bdd5-7040f07f868c-kube-api-access-wlhxb\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.437694 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.437707 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a8a4628a-a631-4db9-bdd5-7040f07f868c-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.692842 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" event={"ID":"a8a4628a-a631-4db9-bdd5-7040f07f868c","Type":"ContainerDied","Data":"c5088dadd29e712e038a38f1945108bae6f12d9637ca90998314c480455981dc"} Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.693199 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5088dadd29e712e038a38f1945108bae6f12d9637ca90998314c480455981dc" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.692950 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-networker-nq5hm" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.792731 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-networker-vwk9m"] Dec 15 09:17:30 crc kubenswrapper[4876]: E1215 09:17:30.793542 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8a4628a-a631-4db9-bdd5-7040f07f868c" containerName="validate-network-openstack-openstack-networker" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.793568 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8a4628a-a631-4db9-bdd5-7040f07f868c" containerName="validate-network-openstack-openstack-networker" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.793856 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8a4628a-a631-4db9-bdd5-7040f07f868c" containerName="validate-network-openstack-openstack-networker" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.795129 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.797174 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.799348 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.803421 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-networker-vwk9m"] Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.946707 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.946760 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swvfc\" (UniqueName: \"kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:30 crc kubenswrapper[4876]: I1215 09:17:30.946886 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.048089 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.048155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swvfc\" (UniqueName: \"kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.048272 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.053924 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " 
pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.067011 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.067515 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swvfc\" (UniqueName: \"kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc\") pod \"install-os-openstack-openstack-networker-vwk9m\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.116078 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.642847 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-networker-vwk9m"] Dec 15 09:17:31 crc kubenswrapper[4876]: I1215 09:17:31.708974 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-vwk9m" event={"ID":"79b4891a-c87e-4cbf-a506-c1dce22f2555","Type":"ContainerStarted","Data":"2c5d564d1552c1fed109d88a5a9de704409c0e09cbc5a9a416577984194b946c"} Dec 15 09:17:33 crc kubenswrapper[4876]: I1215 09:17:33.737062 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-vwk9m" event={"ID":"79b4891a-c87e-4cbf-a506-c1dce22f2555","Type":"ContainerStarted","Data":"dd38fbdc211069bfd2e664aa50f61e5d88507ee225b53f55e53705d01a4eed2c"} Dec 15 09:17:33 crc kubenswrapper[4876]: I1215 09:17:33.756779 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-networker-vwk9m" podStartSLOduration=2.412129455 podStartE2EDuration="3.756761654s" podCreationTimestamp="2025-12-15 09:17:30 +0000 UTC" firstStartedPulling="2025-12-15 09:17:31.64242733 +0000 UTC m=+8777.213570251" lastFinishedPulling="2025-12-15 09:17:32.987059539 +0000 UTC m=+8778.558202450" observedRunningTime="2025-12-15 09:17:33.751258536 +0000 UTC m=+8779.322401457" watchObservedRunningTime="2025-12-15 09:17:33.756761654 +0000 UTC m=+8779.327904565" Dec 15 09:17:42 crc kubenswrapper[4876]: I1215 09:17:42.818384 4876 generic.go:334] "Generic (PLEG): container finished" podID="f569b737-56ff-4e5e-af32-807e3a8ab69a" containerID="4864f87d60a5dc1b4ff5947b85799c5c604b76e4f6a262adfae3eebc66b54ce6" exitCode=0 Dec 15 09:17:42 crc kubenswrapper[4876]: I1215 09:17:42.818495 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" event={"ID":"f569b737-56ff-4e5e-af32-807e3a8ab69a","Type":"ContainerDied","Data":"4864f87d60a5dc1b4ff5947b85799c5c604b76e4f6a262adfae3eebc66b54ce6"} Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.325245 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.467245 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph\") pod \"f569b737-56ff-4e5e-af32-807e3a8ab69a\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.467344 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk5c7\" (UniqueName: \"kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7\") pod \"f569b737-56ff-4e5e-af32-807e3a8ab69a\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.467377 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory\") pod \"f569b737-56ff-4e5e-af32-807e3a8ab69a\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.467498 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key\") pod \"f569b737-56ff-4e5e-af32-807e3a8ab69a\" (UID: \"f569b737-56ff-4e5e-af32-807e3a8ab69a\") " Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.473303 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph" (OuterVolumeSpecName: "ceph") pod "f569b737-56ff-4e5e-af32-807e3a8ab69a" (UID: "f569b737-56ff-4e5e-af32-807e3a8ab69a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.473529 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7" (OuterVolumeSpecName: "kube-api-access-fk5c7") pod "f569b737-56ff-4e5e-af32-807e3a8ab69a" (UID: "f569b737-56ff-4e5e-af32-807e3a8ab69a"). InnerVolumeSpecName "kube-api-access-fk5c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.497508 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f569b737-56ff-4e5e-af32-807e3a8ab69a" (UID: "f569b737-56ff-4e5e-af32-807e3a8ab69a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.497616 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory" (OuterVolumeSpecName: "inventory") pod "f569b737-56ff-4e5e-af32-807e3a8ab69a" (UID: "f569b737-56ff-4e5e-af32-807e3a8ab69a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.570729 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.570795 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk5c7\" (UniqueName: \"kubernetes.io/projected/f569b737-56ff-4e5e-af32-807e3a8ab69a-kube-api-access-fk5c7\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.570806 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.570816 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f569b737-56ff-4e5e-af32-807e3a8ab69a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.837716 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" event={"ID":"f569b737-56ff-4e5e-af32-807e3a8ab69a","Type":"ContainerDied","Data":"14aa9bcc7fd1d60bb46df365280c98c24f5a2299d70cb379dc20dd3a678134c1"} Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.838001 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14aa9bcc7fd1d60bb46df365280c98c24f5a2299d70cb379dc20dd3a678134c1" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.837798 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-hw86n" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.913638 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-54wdc"] Dec 15 09:17:44 crc kubenswrapper[4876]: E1215 09:17:44.914180 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f569b737-56ff-4e5e-af32-807e3a8ab69a" containerName="configure-network-openstack-openstack-cell1" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.914204 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f569b737-56ff-4e5e-af32-807e3a8ab69a" containerName="configure-network-openstack-openstack-cell1" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.914477 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f569b737-56ff-4e5e-af32-807e3a8ab69a" containerName="configure-network-openstack-openstack-cell1" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.916404 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.922767 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.922909 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.931303 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-54wdc"] Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.982652 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.982959 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvgzh\" (UniqueName: \"kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.983182 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:44 crc kubenswrapper[4876]: I1215 09:17:44.983348 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.085974 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.086163 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvgzh\" (UniqueName: \"kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.086224 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: 
\"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.086261 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.092799 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.106149 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.110841 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.115139 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvgzh\" (UniqueName: \"kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh\") pod \"validate-network-openstack-openstack-cell1-54wdc\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.283009 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:45 crc kubenswrapper[4876]: I1215 09:17:45.847445 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-54wdc"] Dec 15 09:17:46 crc kubenswrapper[4876]: I1215 09:17:46.867713 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" event={"ID":"20dc7bae-3650-4be1-af27-121f8ace7a4b","Type":"ContainerStarted","Data":"82e06a8d98951a1b17b2ec4c3df006531824cb895137785e43b69f2aedeac428"} Dec 15 09:17:46 crc kubenswrapper[4876]: I1215 09:17:46.869404 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" event={"ID":"20dc7bae-3650-4be1-af27-121f8ace7a4b","Type":"ContainerStarted","Data":"9726969fbeaeac1857ea49605354dc84bee05c2a4613115919e8841114cd1067"} Dec 15 09:17:46 crc kubenswrapper[4876]: I1215 09:17:46.895170 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" podStartSLOduration=2.44256996 podStartE2EDuration="2.895152297s" podCreationTimestamp="2025-12-15 09:17:44 +0000 UTC" firstStartedPulling="2025-12-15 09:17:45.85622343 +0000 UTC m=+8791.427366341" lastFinishedPulling="2025-12-15 09:17:46.308805767 +0000 UTC m=+8791.879948678" observedRunningTime="2025-12-15 09:17:46.887740698 +0000 UTC m=+8792.458883619" watchObservedRunningTime="2025-12-15 09:17:46.895152297 +0000 UTC m=+8792.466295208" Dec 15 09:17:52 crc kubenswrapper[4876]: I1215 09:17:52.921879 4876 generic.go:334] "Generic (PLEG): container finished" podID="20dc7bae-3650-4be1-af27-121f8ace7a4b" containerID="82e06a8d98951a1b17b2ec4c3df006531824cb895137785e43b69f2aedeac428" exitCode=0 Dec 15 09:17:52 crc kubenswrapper[4876]: I1215 09:17:52.922003 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" event={"ID":"20dc7bae-3650-4be1-af27-121f8ace7a4b","Type":"ContainerDied","Data":"82e06a8d98951a1b17b2ec4c3df006531824cb895137785e43b69f2aedeac428"} Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.365875 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.496274 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key\") pod \"20dc7bae-3650-4be1-af27-121f8ace7a4b\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.496397 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory\") pod \"20dc7bae-3650-4be1-af27-121f8ace7a4b\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.496661 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph\") pod \"20dc7bae-3650-4be1-af27-121f8ace7a4b\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.496748 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvgzh\" (UniqueName: \"kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh\") pod \"20dc7bae-3650-4be1-af27-121f8ace7a4b\" (UID: \"20dc7bae-3650-4be1-af27-121f8ace7a4b\") " Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.503276 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh" (OuterVolumeSpecName: "kube-api-access-fvgzh") pod "20dc7bae-3650-4be1-af27-121f8ace7a4b" (UID: "20dc7bae-3650-4be1-af27-121f8ace7a4b"). InnerVolumeSpecName "kube-api-access-fvgzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.504945 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph" (OuterVolumeSpecName: "ceph") pod "20dc7bae-3650-4be1-af27-121f8ace7a4b" (UID: "20dc7bae-3650-4be1-af27-121f8ace7a4b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.529162 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "20dc7bae-3650-4be1-af27-121f8ace7a4b" (UID: "20dc7bae-3650-4be1-af27-121f8ace7a4b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.529696 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory" (OuterVolumeSpecName: "inventory") pod "20dc7bae-3650-4be1-af27-121f8ace7a4b" (UID: "20dc7bae-3650-4be1-af27-121f8ace7a4b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.599259 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.599303 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvgzh\" (UniqueName: \"kubernetes.io/projected/20dc7bae-3650-4be1-af27-121f8ace7a4b-kube-api-access-fvgzh\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.599322 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.599333 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/20dc7bae-3650-4be1-af27-121f8ace7a4b-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.942872 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" event={"ID":"20dc7bae-3650-4be1-af27-121f8ace7a4b","Type":"ContainerDied","Data":"9726969fbeaeac1857ea49605354dc84bee05c2a4613115919e8841114cd1067"} Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.943328 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9726969fbeaeac1857ea49605354dc84bee05c2a4613115919e8841114cd1067" Dec 15 09:17:54 crc kubenswrapper[4876]: I1215 09:17:54.942918 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-54wdc" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.014625 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hgcsp"] Dec 15 09:17:55 crc kubenswrapper[4876]: E1215 09:17:55.015186 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20dc7bae-3650-4be1-af27-121f8ace7a4b" containerName="validate-network-openstack-openstack-cell1" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.015211 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="20dc7bae-3650-4be1-af27-121f8ace7a4b" containerName="validate-network-openstack-openstack-cell1" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.015515 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="20dc7bae-3650-4be1-af27-121f8ace7a4b" containerName="validate-network-openstack-openstack-cell1" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.016427 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.040710 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.041717 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.067904 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hgcsp"] Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.111010 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.111345 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.111479 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.111703 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbcl7\" (UniqueName: \"kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.214129 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.214203 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.214241 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc 
kubenswrapper[4876]: I1215 09:17:55.214295 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbcl7\" (UniqueName: \"kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.219701 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.219724 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.222014 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.231746 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbcl7\" (UniqueName: \"kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7\") pod \"install-os-openstack-openstack-cell1-hgcsp\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.343433 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.899552 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-hgcsp"] Dec 15 09:17:55 crc kubenswrapper[4876]: W1215 09:17:55.905218 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3df0942a_900c_413a_9cde_671328743f83.slice/crio-e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539 WatchSource:0}: Error finding container e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539: Status 404 returned error can't find the container with id e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539 Dec 15 09:17:55 crc kubenswrapper[4876]: I1215 09:17:55.951795 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" event={"ID":"3df0942a-900c-413a-9cde-671328743f83","Type":"ContainerStarted","Data":"e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539"} Dec 15 09:17:57 crc kubenswrapper[4876]: I1215 09:17:57.977480 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" event={"ID":"3df0942a-900c-413a-9cde-671328743f83","Type":"ContainerStarted","Data":"46a487a1f9d3e6a94e19036c735c399014ece8a1f79ad89b2cdab3138c45e3da"} Dec 15 09:17:58 crc kubenswrapper[4876]: I1215 09:17:58.002961 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" podStartSLOduration=2.550541958 podStartE2EDuration="4.002929222s" podCreationTimestamp="2025-12-15 09:17:54 +0000 UTC" firstStartedPulling="2025-12-15 09:17:55.907098424 +0000 UTC m=+8801.478241335" lastFinishedPulling="2025-12-15 09:17:57.359485698 +0000 UTC m=+8802.930628599" observedRunningTime="2025-12-15 09:17:57.992574854 +0000 UTC m=+8803.563717765" watchObservedRunningTime="2025-12-15 09:17:58.002929222 +0000 UTC m=+8803.574072133" Dec 15 09:18:25 crc kubenswrapper[4876]: I1215 09:18:25.245695 4876 generic.go:334] "Generic (PLEG): container finished" podID="79b4891a-c87e-4cbf-a506-c1dce22f2555" containerID="dd38fbdc211069bfd2e664aa50f61e5d88507ee225b53f55e53705d01a4eed2c" exitCode=0 Dec 15 09:18:25 crc kubenswrapper[4876]: I1215 09:18:25.245792 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-vwk9m" event={"ID":"79b4891a-c87e-4cbf-a506-c1dce22f2555","Type":"ContainerDied","Data":"dd38fbdc211069bfd2e664aa50f61e5d88507ee225b53f55e53705d01a4eed2c"} Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.822309 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.932281 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swvfc\" (UniqueName: \"kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc\") pod \"79b4891a-c87e-4cbf-a506-c1dce22f2555\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.932333 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key\") pod \"79b4891a-c87e-4cbf-a506-c1dce22f2555\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.932400 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory\") pod \"79b4891a-c87e-4cbf-a506-c1dce22f2555\" (UID: \"79b4891a-c87e-4cbf-a506-c1dce22f2555\") " Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.938819 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc" (OuterVolumeSpecName: "kube-api-access-swvfc") pod "79b4891a-c87e-4cbf-a506-c1dce22f2555" (UID: "79b4891a-c87e-4cbf-a506-c1dce22f2555"). InnerVolumeSpecName "kube-api-access-swvfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.963975 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory" (OuterVolumeSpecName: "inventory") pod "79b4891a-c87e-4cbf-a506-c1dce22f2555" (UID: "79b4891a-c87e-4cbf-a506-c1dce22f2555"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:18:26 crc kubenswrapper[4876]: I1215 09:18:26.966507 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "79b4891a-c87e-4cbf-a506-c1dce22f2555" (UID: "79b4891a-c87e-4cbf-a506-c1dce22f2555"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.036116 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swvfc\" (UniqueName: \"kubernetes.io/projected/79b4891a-c87e-4cbf-a506-c1dce22f2555-kube-api-access-swvfc\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.036188 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.036199 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/79b4891a-c87e-4cbf-a506-c1dce22f2555-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.269622 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-networker-vwk9m" event={"ID":"79b4891a-c87e-4cbf-a506-c1dce22f2555","Type":"ContainerDied","Data":"2c5d564d1552c1fed109d88a5a9de704409c0e09cbc5a9a416577984194b946c"} Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.269665 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c5d564d1552c1fed109d88a5a9de704409c0e09cbc5a9a416577984194b946c" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.269959 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-networker-vwk9m" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.353447 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-networker-xvx2j"] Dec 15 09:18:27 crc kubenswrapper[4876]: E1215 09:18:27.355515 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79b4891a-c87e-4cbf-a506-c1dce22f2555" containerName="install-os-openstack-openstack-networker" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.355545 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="79b4891a-c87e-4cbf-a506-c1dce22f2555" containerName="install-os-openstack-openstack-networker" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.355824 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="79b4891a-c87e-4cbf-a506-c1dce22f2555" containerName="install-os-openstack-openstack-networker" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.356933 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.359945 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.360427 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.372933 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-networker-xvx2j"] Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.446123 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.446490 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm2pj\" (UniqueName: \"kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.447035 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.549395 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.549454 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.550326 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bm2pj\" (UniqueName: \"kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.556219 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: 
\"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.559690 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.577942 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm2pj\" (UniqueName: \"kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj\") pod \"configure-os-openstack-openstack-networker-xvx2j\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:27 crc kubenswrapper[4876]: I1215 09:18:27.681576 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:18:28 crc kubenswrapper[4876]: I1215 09:18:28.272454 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-networker-xvx2j"] Dec 15 09:18:29 crc kubenswrapper[4876]: I1215 09:18:29.296459 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" event={"ID":"88c5fd56-5e02-4dfc-a89e-06fa1e22f297","Type":"ContainerStarted","Data":"9c066c7640782339a0072b9050db525f8cab445751f1ac61d9881ccdfc0d0054"} Dec 15 09:18:30 crc kubenswrapper[4876]: I1215 09:18:30.308305 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" event={"ID":"88c5fd56-5e02-4dfc-a89e-06fa1e22f297","Type":"ContainerStarted","Data":"9ae07300622f20095f8d3e5b6948596ad8693151d9c4894b8a603e09182f8235"} Dec 15 09:18:30 crc kubenswrapper[4876]: I1215 09:18:30.334281 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" podStartSLOduration=2.407265301 podStartE2EDuration="3.334250621s" podCreationTimestamp="2025-12-15 09:18:27 +0000 UTC" firstStartedPulling="2025-12-15 09:18:28.297958772 +0000 UTC m=+8833.869101683" lastFinishedPulling="2025-12-15 09:18:29.224944092 +0000 UTC m=+8834.796087003" observedRunningTime="2025-12-15 09:18:30.325380422 +0000 UTC m=+8835.896523333" watchObservedRunningTime="2025-12-15 09:18:30.334250621 +0000 UTC m=+8835.905393532" Dec 15 09:18:48 crc kubenswrapper[4876]: I1215 09:18:48.492029 4876 generic.go:334] "Generic (PLEG): container finished" podID="3df0942a-900c-413a-9cde-671328743f83" containerID="46a487a1f9d3e6a94e19036c735c399014ece8a1f79ad89b2cdab3138c45e3da" exitCode=0 Dec 15 09:18:48 crc kubenswrapper[4876]: I1215 09:18:48.492201 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" event={"ID":"3df0942a-900c-413a-9cde-671328743f83","Type":"ContainerDied","Data":"46a487a1f9d3e6a94e19036c735c399014ece8a1f79ad89b2cdab3138c45e3da"} Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.055932 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.251533 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph\") pod \"3df0942a-900c-413a-9cde-671328743f83\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.251641 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory\") pod \"3df0942a-900c-413a-9cde-671328743f83\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.251692 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbcl7\" (UniqueName: \"kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7\") pod \"3df0942a-900c-413a-9cde-671328743f83\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.251805 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key\") pod \"3df0942a-900c-413a-9cde-671328743f83\" (UID: \"3df0942a-900c-413a-9cde-671328743f83\") " Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.256655 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph" (OuterVolumeSpecName: "ceph") pod "3df0942a-900c-413a-9cde-671328743f83" (UID: "3df0942a-900c-413a-9cde-671328743f83"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.258385 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7" (OuterVolumeSpecName: "kube-api-access-bbcl7") pod "3df0942a-900c-413a-9cde-671328743f83" (UID: "3df0942a-900c-413a-9cde-671328743f83"). InnerVolumeSpecName "kube-api-access-bbcl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.278097 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3df0942a-900c-413a-9cde-671328743f83" (UID: "3df0942a-900c-413a-9cde-671328743f83"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.284221 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory" (OuterVolumeSpecName: "inventory") pod "3df0942a-900c-413a-9cde-671328743f83" (UID: "3df0942a-900c-413a-9cde-671328743f83"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.353619 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.353653 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.353662 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3df0942a-900c-413a-9cde-671328743f83-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.353672 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbcl7\" (UniqueName: \"kubernetes.io/projected/3df0942a-900c-413a-9cde-671328743f83-kube-api-access-bbcl7\") on node \"crc\" DevicePath \"\"" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.510838 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" event={"ID":"3df0942a-900c-413a-9cde-671328743f83","Type":"ContainerDied","Data":"e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539"} Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.510880 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2cbac4ac739e45b80f3fbb2b33fae0cd2d13d66eef14dd5b229d2c2f8789539" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.510908 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-hgcsp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.630520 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-cvdtp"] Dec 15 09:18:50 crc kubenswrapper[4876]: E1215 09:18:50.631011 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3df0942a-900c-413a-9cde-671328743f83" containerName="install-os-openstack-openstack-cell1" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.631027 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3df0942a-900c-413a-9cde-671328743f83" containerName="install-os-openstack-openstack-cell1" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.631263 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3df0942a-900c-413a-9cde-671328743f83" containerName="install-os-openstack-openstack-cell1" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.631965 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.634848 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.635353 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.643373 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-cvdtp"] Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.764305 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.764392 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.764528 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.764561 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cpfn\" (UniqueName: \"kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.866193 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.866358 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.866401 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cpfn\" (UniqueName: \"kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " 
pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.866461 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.870691 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.871242 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.871872 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.883810 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cpfn\" (UniqueName: \"kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn\") pod \"configure-os-openstack-openstack-cell1-cvdtp\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:50 crc kubenswrapper[4876]: I1215 09:18:50.950516 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:18:51 crc kubenswrapper[4876]: I1215 09:18:51.501611 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-cvdtp"] Dec 15 09:18:51 crc kubenswrapper[4876]: W1215 09:18:51.757795 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb76a082_a81a_498d_9be3_3af9e9b5b01e.slice/crio-54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93 WatchSource:0}: Error finding container 54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93: Status 404 returned error can't find the container with id 54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93 Dec 15 09:18:52 crc kubenswrapper[4876]: I1215 09:18:52.530933 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" event={"ID":"fb76a082-a81a-498d-9be3-3af9e9b5b01e","Type":"ContainerStarted","Data":"cb127d5e9d8826f712f1c4782824564aa77a104a68609192e09c34d0bd571fa5"} Dec 15 09:18:52 crc kubenswrapper[4876]: I1215 09:18:52.531551 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" event={"ID":"fb76a082-a81a-498d-9be3-3af9e9b5b01e","Type":"ContainerStarted","Data":"54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93"} Dec 15 09:18:52 crc kubenswrapper[4876]: I1215 09:18:52.549379 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" podStartSLOduration=2.089962219 podStartE2EDuration="2.54935477s" podCreationTimestamp="2025-12-15 09:18:50 +0000 UTC" firstStartedPulling="2025-12-15 09:18:51.760464308 +0000 UTC m=+8857.331607219" lastFinishedPulling="2025-12-15 09:18:52.219856849 +0000 UTC m=+8857.790999770" observedRunningTime="2025-12-15 09:18:52.548135707 +0000 UTC m=+8858.119278638" watchObservedRunningTime="2025-12-15 09:18:52.54935477 +0000 UTC m=+8858.120497691" Dec 15 09:19:25 crc kubenswrapper[4876]: I1215 09:19:25.838987 4876 generic.go:334] "Generic (PLEG): container finished" podID="88c5fd56-5e02-4dfc-a89e-06fa1e22f297" containerID="9ae07300622f20095f8d3e5b6948596ad8693151d9c4894b8a603e09182f8235" exitCode=0 Dec 15 09:19:25 crc kubenswrapper[4876]: I1215 09:19:25.839066 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" event={"ID":"88c5fd56-5e02-4dfc-a89e-06fa1e22f297","Type":"ContainerDied","Data":"9ae07300622f20095f8d3e5b6948596ad8693151d9c4894b8a603e09182f8235"} Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.323311 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.323693 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.362448 4876 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.469863 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory\") pod \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.469974 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key\") pod \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.470295 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm2pj\" (UniqueName: \"kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj\") pod \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\" (UID: \"88c5fd56-5e02-4dfc-a89e-06fa1e22f297\") " Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.477647 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj" (OuterVolumeSpecName: "kube-api-access-bm2pj") pod "88c5fd56-5e02-4dfc-a89e-06fa1e22f297" (UID: "88c5fd56-5e02-4dfc-a89e-06fa1e22f297"). InnerVolumeSpecName "kube-api-access-bm2pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.505778 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory" (OuterVolumeSpecName: "inventory") pod "88c5fd56-5e02-4dfc-a89e-06fa1e22f297" (UID: "88c5fd56-5e02-4dfc-a89e-06fa1e22f297"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.506627 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "88c5fd56-5e02-4dfc-a89e-06fa1e22f297" (UID: "88c5fd56-5e02-4dfc-a89e-06fa1e22f297"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.572806 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm2pj\" (UniqueName: \"kubernetes.io/projected/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-kube-api-access-bm2pj\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.572953 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.573031 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/88c5fd56-5e02-4dfc-a89e-06fa1e22f297-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.861079 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" event={"ID":"88c5fd56-5e02-4dfc-a89e-06fa1e22f297","Type":"ContainerDied","Data":"9c066c7640782339a0072b9050db525f8cab445751f1ac61d9881ccdfc0d0054"} Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.861740 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c066c7640782339a0072b9050db525f8cab445751f1ac61d9881ccdfc0d0054" Dec 15 09:19:27 crc kubenswrapper[4876]: I1215 09:19:27.861201 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-networker-xvx2j" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.008393 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-networker-qc76v"] Dec 15 09:19:28 crc kubenswrapper[4876]: E1215 09:19:28.009524 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88c5fd56-5e02-4dfc-a89e-06fa1e22f297" containerName="configure-os-openstack-openstack-networker" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.009575 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="88c5fd56-5e02-4dfc-a89e-06fa1e22f297" containerName="configure-os-openstack-openstack-networker" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.010018 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="88c5fd56-5e02-4dfc-a89e-06fa1e22f297" containerName="configure-os-openstack-openstack-networker" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.011594 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.015525 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.015501 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.033379 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-networker-qc76v"] Dec 15 09:19:28 crc kubenswrapper[4876]: E1215 09:19:28.077676 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88c5fd56_5e02_4dfc_a89e_06fa1e22f297.slice\": RecentStats: unable to find data in memory cache]" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.188736 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwxqr\" (UniqueName: \"kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.188813 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.189260 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.291288 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.291773 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwxqr\" (UniqueName: \"kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.291942 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.297839 
4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.317562 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.318183 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwxqr\" (UniqueName: \"kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr\") pod \"run-os-openstack-openstack-networker-qc76v\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.336948 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:28 crc kubenswrapper[4876]: I1215 09:19:28.881179 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-networker-qc76v"] Dec 15 09:19:29 crc kubenswrapper[4876]: I1215 09:19:29.878383 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-qc76v" event={"ID":"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978","Type":"ContainerStarted","Data":"1673264473c87062cdd6a4e36e8141f24909594c0659a600384479da1fc527cc"} Dec 15 09:19:30 crc kubenswrapper[4876]: I1215 09:19:30.893803 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-qc76v" event={"ID":"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978","Type":"ContainerStarted","Data":"578f99ca2e4c02a5a7e36e051f627b79dd47af54d1efb23d6a3f200d41f3aea1"} Dec 15 09:19:30 crc kubenswrapper[4876]: I1215 09:19:30.916453 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-networker-qc76v" podStartSLOduration=3.172553056 podStartE2EDuration="3.916380628s" podCreationTimestamp="2025-12-15 09:19:27 +0000 UTC" firstStartedPulling="2025-12-15 09:19:28.880019626 +0000 UTC m=+8894.451162537" lastFinishedPulling="2025-12-15 09:19:29.623847198 +0000 UTC m=+8895.194990109" observedRunningTime="2025-12-15 09:19:30.914659352 +0000 UTC m=+8896.485802263" watchObservedRunningTime="2025-12-15 09:19:30.916380628 +0000 UTC m=+8896.487523539" Dec 15 09:19:39 crc kubenswrapper[4876]: I1215 09:19:39.974510 4876 generic.go:334] "Generic (PLEG): container finished" podID="ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" containerID="578f99ca2e4c02a5a7e36e051f627b79dd47af54d1efb23d6a3f200d41f3aea1" exitCode=0 Dec 15 09:19:39 crc kubenswrapper[4876]: I1215 09:19:39.975070 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-qc76v" event={"ID":"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978","Type":"ContainerDied","Data":"578f99ca2e4c02a5a7e36e051f627b79dd47af54d1efb23d6a3f200d41f3aea1"} Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.480560 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.567480 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwxqr\" (UniqueName: \"kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr\") pod \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.567566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key\") pod \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.567629 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory\") pod \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\" (UID: \"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978\") " Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.586333 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr" (OuterVolumeSpecName: "kube-api-access-xwxqr") pod "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" (UID: "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978"). InnerVolumeSpecName "kube-api-access-xwxqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.599670 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory" (OuterVolumeSpecName: "inventory") pod "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" (UID: "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.607373 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" (UID: "ba1fd6d1-ba1e-4b71-ae44-88c90eebc978"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.670776 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwxqr\" (UniqueName: \"kubernetes.io/projected/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-kube-api-access-xwxqr\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.670844 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.670859 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ba1fd6d1-ba1e-4b71-ae44-88c90eebc978-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.997534 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-networker-qc76v" event={"ID":"ba1fd6d1-ba1e-4b71-ae44-88c90eebc978","Type":"ContainerDied","Data":"1673264473c87062cdd6a4e36e8141f24909594c0659a600384479da1fc527cc"} Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.997583 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1673264473c87062cdd6a4e36e8141f24909594c0659a600384479da1fc527cc" Dec 15 09:19:41 crc kubenswrapper[4876]: I1215 09:19:41.997610 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-networker-qc76v" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.081873 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-q9k9q"] Dec 15 09:19:42 crc kubenswrapper[4876]: E1215 09:19:42.082774 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" containerName="run-os-openstack-openstack-networker" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.082802 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" containerName="run-os-openstack-openstack-networker" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.083093 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba1fd6d1-ba1e-4b71-ae44-88c90eebc978" containerName="run-os-openstack-openstack-networker" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.084017 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.093060 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.093364 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.099137 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-q9k9q"] Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.181303 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.181361 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dkhg\" (UniqueName: \"kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.181409 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.283300 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.283355 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dkhg\" (UniqueName: \"kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.283393 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.287522 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " 
pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.287525 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.300902 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dkhg\" (UniqueName: \"kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg\") pod \"reboot-os-openstack-openstack-networker-q9k9q\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:42 crc kubenswrapper[4876]: I1215 09:19:42.407431 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:19:43 crc kubenswrapper[4876]: I1215 09:19:43.005917 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-networker-q9k9q"] Dec 15 09:19:44 crc kubenswrapper[4876]: I1215 09:19:44.015860 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" event={"ID":"9c9ecea0-529d-4c59-b9ce-b822f44b99d8","Type":"ContainerStarted","Data":"e11dc198d640b8dd8e10378c267c62e39b622874eb96a4bcf4d99d2233a8f466"} Dec 15 09:19:46 crc kubenswrapper[4876]: I1215 09:19:46.035064 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" event={"ID":"9c9ecea0-529d-4c59-b9ce-b822f44b99d8","Type":"ContainerStarted","Data":"a112d1abad445071ac3c86d66b697f1e5cc331b9bdc7a08321f84471dce3dca3"} Dec 15 09:19:46 crc kubenswrapper[4876]: I1215 09:19:46.059384 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" podStartSLOduration=2.308521285 podStartE2EDuration="4.059365075s" podCreationTimestamp="2025-12-15 09:19:42 +0000 UTC" firstStartedPulling="2025-12-15 09:19:43.467097313 +0000 UTC m=+8909.038240234" lastFinishedPulling="2025-12-15 09:19:45.217941113 +0000 UTC m=+8910.789084024" observedRunningTime="2025-12-15 09:19:46.04982502 +0000 UTC m=+8911.620967941" watchObservedRunningTime="2025-12-15 09:19:46.059365075 +0000 UTC m=+8911.630507986" Dec 15 09:19:49 crc kubenswrapper[4876]: I1215 09:19:49.070553 4876 generic.go:334] "Generic (PLEG): container finished" podID="fb76a082-a81a-498d-9be3-3af9e9b5b01e" containerID="cb127d5e9d8826f712f1c4782824564aa77a104a68609192e09c34d0bd571fa5" exitCode=0 Dec 15 09:19:49 crc kubenswrapper[4876]: I1215 09:19:49.070648 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" event={"ID":"fb76a082-a81a-498d-9be3-3af9e9b5b01e","Type":"ContainerDied","Data":"cb127d5e9d8826f712f1c4782824564aa77a104a68609192e09c34d0bd571fa5"} Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.517489 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.565023 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory\") pod \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.565069 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cpfn\" (UniqueName: \"kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn\") pod \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.565087 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key\") pod \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.565266 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph\") pod \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\" (UID: \"fb76a082-a81a-498d-9be3-3af9e9b5b01e\") " Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.571137 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph" (OuterVolumeSpecName: "ceph") pod "fb76a082-a81a-498d-9be3-3af9e9b5b01e" (UID: "fb76a082-a81a-498d-9be3-3af9e9b5b01e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.571421 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn" (OuterVolumeSpecName: "kube-api-access-8cpfn") pod "fb76a082-a81a-498d-9be3-3af9e9b5b01e" (UID: "fb76a082-a81a-498d-9be3-3af9e9b5b01e"). InnerVolumeSpecName "kube-api-access-8cpfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.602157 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fb76a082-a81a-498d-9be3-3af9e9b5b01e" (UID: "fb76a082-a81a-498d-9be3-3af9e9b5b01e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.602750 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory" (OuterVolumeSpecName: "inventory") pod "fb76a082-a81a-498d-9be3-3af9e9b5b01e" (UID: "fb76a082-a81a-498d-9be3-3af9e9b5b01e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.667821 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.667865 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cpfn\" (UniqueName: \"kubernetes.io/projected/fb76a082-a81a-498d-9be3-3af9e9b5b01e-kube-api-access-8cpfn\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.667882 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:50 crc kubenswrapper[4876]: I1215 09:19:50.667895 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb76a082-a81a-498d-9be3-3af9e9b5b01e-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.091389 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" event={"ID":"fb76a082-a81a-498d-9be3-3af9e9b5b01e","Type":"ContainerDied","Data":"54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93"} Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.091442 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54dc02a5cac8a2a317957f578d0409061f9f464b5073d5974430835fac461e93" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.091462 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-cvdtp" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.164059 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-vmx88"] Dec 15 09:19:51 crc kubenswrapper[4876]: E1215 09:19:51.164480 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb76a082-a81a-498d-9be3-3af9e9b5b01e" containerName="configure-os-openstack-openstack-cell1" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.164497 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb76a082-a81a-498d-9be3-3af9e9b5b01e" containerName="configure-os-openstack-openstack-cell1" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.164682 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb76a082-a81a-498d-9be3-3af9e9b5b01e" containerName="configure-os-openstack-openstack-cell1" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.165432 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.167940 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.168050 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.176458 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-vmx88"] Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279019 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279175 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279350 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhjsb\" (UniqueName: \"kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279493 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279606 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.279637 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.381319 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 
09:19:51.381655 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.381700 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.382316 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.382388 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhjsb\" (UniqueName: \"kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.382433 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.386170 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.386348 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.386937 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.387619 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.394219 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.399690 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhjsb\" (UniqueName: \"kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb\") pod \"ssh-known-hosts-openstack-vmx88\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:51 crc kubenswrapper[4876]: I1215 09:19:51.481211 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:19:52 crc kubenswrapper[4876]: I1215 09:19:52.008727 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-vmx88"] Dec 15 09:19:52 crc kubenswrapper[4876]: I1215 09:19:52.101791 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-vmx88" event={"ID":"f8ae1854-b545-42e7-acbf-c64fc343ae54","Type":"ContainerStarted","Data":"6b4aa986e6202c7e587d71fd2ca77090772149bb4dcea48aab65a847e01443d0"} Dec 15 09:19:53 crc kubenswrapper[4876]: I1215 09:19:53.113169 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-vmx88" event={"ID":"f8ae1854-b545-42e7-acbf-c64fc343ae54","Type":"ContainerStarted","Data":"e90a138f4cfe603c58e5769ffd5ee2df07f584ed58d421452775c71a63fdf33a"} Dec 15 09:19:53 crc kubenswrapper[4876]: I1215 09:19:53.135640 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-vmx88" podStartSLOduration=1.551933028 podStartE2EDuration="2.135612426s" podCreationTimestamp="2025-12-15 09:19:51 +0000 UTC" firstStartedPulling="2025-12-15 09:19:52.017309837 +0000 UTC m=+8917.588452748" lastFinishedPulling="2025-12-15 09:19:52.600989235 +0000 UTC m=+8918.172132146" observedRunningTime="2025-12-15 09:19:53.131755413 +0000 UTC m=+8918.702898344" watchObservedRunningTime="2025-12-15 09:19:53.135612426 +0000 UTC m=+8918.706755347" Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.857405 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.860562 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.874339 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.924163 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.924263 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4m78\" (UniqueName: \"kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:56 crc kubenswrapper[4876]: I1215 09:19:56.924358 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.025957 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.026133 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.026221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4m78\" (UniqueName: \"kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.026619 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.026668 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.048274 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d4m78\" (UniqueName: \"kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78\") pod \"community-operators-6ljsj\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.181204 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.322855 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.323158 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:19:57 crc kubenswrapper[4876]: I1215 09:19:57.767140 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:19:58 crc kubenswrapper[4876]: I1215 09:19:58.159726 4876 generic.go:334] "Generic (PLEG): container finished" podID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerID="0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92" exitCode=0 Dec 15 09:19:58 crc kubenswrapper[4876]: I1215 09:19:58.159772 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerDied","Data":"0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92"} Dec 15 09:19:58 crc kubenswrapper[4876]: I1215 09:19:58.159800 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerStarted","Data":"a11ed2a27e5c1e7b1fc40c5a09110a0d2baaaed107695cc2cd135e0256f4b36c"} Dec 15 09:19:58 crc kubenswrapper[4876]: I1215 09:19:58.162298 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:19:59 crc kubenswrapper[4876]: I1215 09:19:59.170800 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerStarted","Data":"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2"} Dec 15 09:20:01 crc kubenswrapper[4876]: I1215 09:20:01.192351 4876 generic.go:334] "Generic (PLEG): container finished" podID="9c9ecea0-529d-4c59-b9ce-b822f44b99d8" containerID="a112d1abad445071ac3c86d66b697f1e5cc331b9bdc7a08321f84471dce3dca3" exitCode=0 Dec 15 09:20:01 crc kubenswrapper[4876]: I1215 09:20:01.192490 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" event={"ID":"9c9ecea0-529d-4c59-b9ce-b822f44b99d8","Type":"ContainerDied","Data":"a112d1abad445071ac3c86d66b697f1e5cc331b9bdc7a08321f84471dce3dca3"} Dec 15 09:20:01 crc kubenswrapper[4876]: I1215 09:20:01.196191 4876 generic.go:334] "Generic (PLEG): container finished" 
podID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerID="6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2" exitCode=0 Dec 15 09:20:01 crc kubenswrapper[4876]: I1215 09:20:01.196258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerDied","Data":"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2"} Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.208133 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerStarted","Data":"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a"} Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.238208 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6ljsj" podStartSLOduration=2.738416087 podStartE2EDuration="6.238189867s" podCreationTimestamp="2025-12-15 09:19:56 +0000 UTC" firstStartedPulling="2025-12-15 09:19:58.162044935 +0000 UTC m=+8923.733187846" lastFinishedPulling="2025-12-15 09:20:01.661818715 +0000 UTC m=+8927.232961626" observedRunningTime="2025-12-15 09:20:02.227373817 +0000 UTC m=+8927.798516728" watchObservedRunningTime="2025-12-15 09:20:02.238189867 +0000 UTC m=+8927.809332788" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.739713 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.856958 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key\") pod \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.857030 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dkhg\" (UniqueName: \"kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg\") pod \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.857059 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory\") pod \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\" (UID: \"9c9ecea0-529d-4c59-b9ce-b822f44b99d8\") " Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.864259 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg" (OuterVolumeSpecName: "kube-api-access-2dkhg") pod "9c9ecea0-529d-4c59-b9ce-b822f44b99d8" (UID: "9c9ecea0-529d-4c59-b9ce-b822f44b99d8"). InnerVolumeSpecName "kube-api-access-2dkhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.889345 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory" (OuterVolumeSpecName: "inventory") pod "9c9ecea0-529d-4c59-b9ce-b822f44b99d8" (UID: "9c9ecea0-529d-4c59-b9ce-b822f44b99d8"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.889904 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9c9ecea0-529d-4c59-b9ce-b822f44b99d8" (UID: "9c9ecea0-529d-4c59-b9ce-b822f44b99d8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.959496 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.959550 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:02 crc kubenswrapper[4876]: I1215 09:20:02.959562 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dkhg\" (UniqueName: \"kubernetes.io/projected/9c9ecea0-529d-4c59-b9ce-b822f44b99d8-kube-api-access-2dkhg\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.223022 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" event={"ID":"9c9ecea0-529d-4c59-b9ce-b822f44b99d8","Type":"ContainerDied","Data":"e11dc198d640b8dd8e10378c267c62e39b622874eb96a4bcf4d99d2233a8f466"} Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.223071 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e11dc198d640b8dd8e10378c267c62e39b622874eb96a4bcf4d99d2233a8f466" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.223094 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-networker-q9k9q" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.320895 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-networker-ll6qb"] Dec 15 09:20:03 crc kubenswrapper[4876]: E1215 09:20:03.321391 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9ecea0-529d-4c59-b9ce-b822f44b99d8" containerName="reboot-os-openstack-openstack-networker" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.321411 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9ecea0-529d-4c59-b9ce-b822f44b99d8" containerName="reboot-os-openstack-openstack-networker" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.321604 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c9ecea0-529d-4c59-b9ce-b822f44b99d8" containerName="reboot-os-openstack-openstack-networker" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.322445 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.325156 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.342152 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-networker-ll6qb"] Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.380639 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.380781 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhncd\" (UniqueName: \"kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.380832 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.380869 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.380931 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.381003 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482578 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhncd\" (UniqueName: \"kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: 
\"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482655 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482677 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482722 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482774 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.482816 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.486315 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.486854 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.498248 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " 
pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.498994 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.501096 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.505966 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhncd\" (UniqueName: \"kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd\") pod \"install-certs-openstack-openstack-networker-ll6qb\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:03 crc kubenswrapper[4876]: I1215 09:20:03.690161 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:04 crc kubenswrapper[4876]: I1215 09:20:04.255457 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-networker-ll6qb"] Dec 15 09:20:04 crc kubenswrapper[4876]: W1215 09:20:04.257547 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f5d7261_64ac_49b8_a081_34c57e792c7b.slice/crio-ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984 WatchSource:0}: Error finding container ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984: Status 404 returned error can't find the container with id ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984 Dec 15 09:20:05 crc kubenswrapper[4876]: I1215 09:20:05.242069 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" event={"ID":"8f5d7261-64ac-49b8-a081-34c57e792c7b","Type":"ContainerStarted","Data":"ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984"} Dec 15 09:20:06 crc kubenswrapper[4876]: I1215 09:20:06.253916 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" event={"ID":"8f5d7261-64ac-49b8-a081-34c57e792c7b","Type":"ContainerStarted","Data":"9c686694c51ebbbef0fcb8cf1a84556b34b228887721cb380242ca42f4ac7f7e"} Dec 15 09:20:06 crc kubenswrapper[4876]: I1215 09:20:06.273847 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" podStartSLOduration=2.824228355 podStartE2EDuration="3.273824992s" podCreationTimestamp="2025-12-15 09:20:03 +0000 UTC" firstStartedPulling="2025-12-15 09:20:04.260378957 +0000 UTC m=+8929.831521868" lastFinishedPulling="2025-12-15 09:20:04.709975594 +0000 UTC m=+8930.281118505" observedRunningTime="2025-12-15 09:20:06.271886139 +0000 UTC m=+8931.843029070" 
watchObservedRunningTime="2025-12-15 09:20:06.273824992 +0000 UTC m=+8931.844967903" Dec 15 09:20:07 crc kubenswrapper[4876]: I1215 09:20:07.181831 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:07 crc kubenswrapper[4876]: I1215 09:20:07.182229 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:07 crc kubenswrapper[4876]: I1215 09:20:07.313367 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:07 crc kubenswrapper[4876]: I1215 09:20:07.372395 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:07 crc kubenswrapper[4876]: I1215 09:20:07.556726 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.286344 4876 generic.go:334] "Generic (PLEG): container finished" podID="f8ae1854-b545-42e7-acbf-c64fc343ae54" containerID="e90a138f4cfe603c58e5769ffd5ee2df07f584ed58d421452775c71a63fdf33a" exitCode=0 Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.287022 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6ljsj" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="registry-server" containerID="cri-o://4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a" gracePeriod=2 Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.287492 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-vmx88" event={"ID":"f8ae1854-b545-42e7-acbf-c64fc343ae54","Type":"ContainerDied","Data":"e90a138f4cfe603c58e5769ffd5ee2df07f584ed58d421452775c71a63fdf33a"} Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.791683 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.915265 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content\") pod \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.915453 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities\") pod \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.915508 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4m78\" (UniqueName: \"kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78\") pod \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\" (UID: \"4b9e51ad-4e88-453c-928f-a1d8ef15dc87\") " Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.916589 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities" (OuterVolumeSpecName: "utilities") pod "4b9e51ad-4e88-453c-928f-a1d8ef15dc87" (UID: "4b9e51ad-4e88-453c-928f-a1d8ef15dc87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.921968 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78" (OuterVolumeSpecName: "kube-api-access-d4m78") pod "4b9e51ad-4e88-453c-928f-a1d8ef15dc87" (UID: "4b9e51ad-4e88-453c-928f-a1d8ef15dc87"). InnerVolumeSpecName "kube-api-access-d4m78". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:09 crc kubenswrapper[4876]: I1215 09:20:09.972159 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4b9e51ad-4e88-453c-928f-a1d8ef15dc87" (UID: "4b9e51ad-4e88-453c-928f-a1d8ef15dc87"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.018145 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.018187 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.018197 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4m78\" (UniqueName: \"kubernetes.io/projected/4b9e51ad-4e88-453c-928f-a1d8ef15dc87-kube-api-access-d4m78\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.299594 4876 generic.go:334] "Generic (PLEG): container finished" podID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerID="4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a" exitCode=0 Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.299663 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerDied","Data":"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a"} Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.299711 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6ljsj" event={"ID":"4b9e51ad-4e88-453c-928f-a1d8ef15dc87","Type":"ContainerDied","Data":"a11ed2a27e5c1e7b1fc40c5a09110a0d2baaaed107695cc2cd135e0256f4b36c"} Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.299734 4876 scope.go:117] "RemoveContainer" containerID="4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.300376 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6ljsj" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.337270 4876 scope.go:117] "RemoveContainer" containerID="6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.349287 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.359834 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6ljsj"] Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.368528 4876 scope.go:117] "RemoveContainer" containerID="0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.427864 4876 scope.go:117] "RemoveContainer" containerID="4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a" Dec 15 09:20:10 crc kubenswrapper[4876]: E1215 09:20:10.429043 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a\": container with ID starting with 4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a not found: ID does not exist" containerID="4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.429081 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a"} err="failed to get container status \"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a\": rpc error: code = NotFound desc = could not find container \"4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a\": container with ID starting with 4c4df6324113cc2759ed8e34dd8e4439ce6061b3c5ba86a3dd538e715b554a3a not found: ID does not exist" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.429118 4876 scope.go:117] "RemoveContainer" containerID="6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2" Dec 15 09:20:10 crc kubenswrapper[4876]: E1215 09:20:10.429692 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2\": container with ID starting with 6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2 not found: ID does not exist" containerID="6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.429743 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2"} err="failed to get container status \"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2\": rpc error: code = NotFound desc = could not find container \"6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2\": container with ID starting with 6961a7bdf8959bd6fe623633b3197fe214d1f800d80e042e1abee14de6f2b1f2 not found: ID does not exist" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.429775 4876 scope.go:117] "RemoveContainer" containerID="0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92" Dec 15 09:20:10 crc kubenswrapper[4876]: E1215 09:20:10.430264 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92\": container with ID starting with 0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92 not found: ID does not exist" containerID="0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.430289 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92"} err="failed to get container status \"0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92\": rpc error: code = NotFound desc = could not find container \"0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92\": container with ID starting with 0b1735a302a958ee089de24c8227ef6b1d0473afb6e2d20eadb8e1e2ff11ae92 not found: ID does not exist" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.718693 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" path="/var/lib/kubelet/pods/4b9e51ad-4e88-453c-928f-a1d8ef15dc87/volumes" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.778696 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838534 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838630 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838655 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhjsb\" (UniqueName: \"kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838750 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838853 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.838890 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1\") pod \"f8ae1854-b545-42e7-acbf-c64fc343ae54\" (UID: \"f8ae1854-b545-42e7-acbf-c64fc343ae54\") " Dec 15 09:20:10 crc 
kubenswrapper[4876]: I1215 09:20:10.846387 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph" (OuterVolumeSpecName: "ceph") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.849350 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb" (OuterVolumeSpecName: "kube-api-access-vhjsb") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "kube-api-access-vhjsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.872231 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.876266 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1" (OuterVolumeSpecName: "inventory-1") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "inventory-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.877227 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.887094 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker" (OuterVolumeSpecName: "ssh-key-openstack-networker") pod "f8ae1854-b545-42e7-acbf-c64fc343ae54" (UID: "f8ae1854-b545-42e7-acbf-c64fc343ae54"). InnerVolumeSpecName "ssh-key-openstack-networker". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941415 4876 reconciler_common.go:293] "Volume detached for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941461 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941477 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-networker\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ssh-key-openstack-networker\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941489 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941499 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhjsb\" (UniqueName: \"kubernetes.io/projected/f8ae1854-b545-42e7-acbf-c64fc343ae54-kube-api-access-vhjsb\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:10 crc kubenswrapper[4876]: I1215 09:20:10.941510 4876 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/f8ae1854-b545-42e7-acbf-c64fc343ae54-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.312441 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-vmx88" event={"ID":"f8ae1854-b545-42e7-acbf-c64fc343ae54","Type":"ContainerDied","Data":"6b4aa986e6202c7e587d71fd2ca77090772149bb4dcea48aab65a847e01443d0"} Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.312835 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b4aa986e6202c7e587d71fd2ca77090772149bb4dcea48aab65a847e01443d0" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.312435 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-vmx88" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405011 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-gzsfz"] Dec 15 09:20:11 crc kubenswrapper[4876]: E1215 09:20:11.405517 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="extract-utilities" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405540 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="extract-utilities" Dec 15 09:20:11 crc kubenswrapper[4876]: E1215 09:20:11.405562 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="extract-content" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405571 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="extract-content" Dec 15 09:20:11 crc kubenswrapper[4876]: E1215 09:20:11.405595 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ae1854-b545-42e7-acbf-c64fc343ae54" containerName="ssh-known-hosts-openstack" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405603 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ae1854-b545-42e7-acbf-c64fc343ae54" containerName="ssh-known-hosts-openstack" Dec 15 09:20:11 crc kubenswrapper[4876]: E1215 09:20:11.405647 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="registry-server" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405657 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="registry-server" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405888 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ae1854-b545-42e7-acbf-c64fc343ae54" containerName="ssh-known-hosts-openstack" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.405924 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b9e51ad-4e88-453c-928f-a1d8ef15dc87" containerName="registry-server" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.406819 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.409411 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.410146 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.418524 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-gzsfz"] Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.452027 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.452135 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.452177 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.452201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjbv2\" (UniqueName: \"kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.554209 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.554281 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.554312 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjbv2\" (UniqueName: \"kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.554928 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.560052 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.560096 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.560454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.582598 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjbv2\" (UniqueName: \"kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2\") pod \"run-os-openstack-openstack-cell1-gzsfz\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:11 crc kubenswrapper[4876]: I1215 09:20:11.735472 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:12 crc kubenswrapper[4876]: I1215 09:20:12.112675 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-gzsfz"] Dec 15 09:20:12 crc kubenswrapper[4876]: I1215 09:20:12.322581 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" event={"ID":"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a","Type":"ContainerStarted","Data":"d7d19e43e90b02824133e86f1718df4fee737c404f390fb3a459f00f8e78a67b"} Dec 15 09:20:14 crc kubenswrapper[4876]: I1215 09:20:14.343802 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" event={"ID":"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a","Type":"ContainerStarted","Data":"1e70351a7b4bfaf02f556c9382872d456ddccdbbf411cd3390b516a0bfb32db0"} Dec 15 09:20:14 crc kubenswrapper[4876]: I1215 09:20:14.371521 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" podStartSLOduration=1.5798454739999999 podStartE2EDuration="3.371486671s" podCreationTimestamp="2025-12-15 09:20:11 +0000 UTC" firstStartedPulling="2025-12-15 09:20:12.110084356 +0000 UTC m=+8937.681227267" lastFinishedPulling="2025-12-15 09:20:13.901725553 +0000 UTC m=+8939.472868464" observedRunningTime="2025-12-15 09:20:14.36477611 +0000 UTC m=+8939.935919041" watchObservedRunningTime="2025-12-15 09:20:14.371486671 +0000 UTC m=+8939.942629572" Dec 15 09:20:17 crc kubenswrapper[4876]: I1215 09:20:17.372155 4876 generic.go:334] "Generic (PLEG): container finished" podID="8f5d7261-64ac-49b8-a081-34c57e792c7b" containerID="9c686694c51ebbbef0fcb8cf1a84556b34b228887721cb380242ca42f4ac7f7e" exitCode=0 Dec 15 09:20:17 crc kubenswrapper[4876]: I1215 09:20:17.372232 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" event={"ID":"8f5d7261-64ac-49b8-a081-34c57e792c7b","Type":"ContainerDied","Data":"9c686694c51ebbbef0fcb8cf1a84556b34b228887721cb380242ca42f4ac7f7e"} Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.877794 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.919067 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.919563 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.919716 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhncd\" (UniqueName: \"kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.919940 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.920146 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.920310 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle\") pod \"8f5d7261-64ac-49b8-a081-34c57e792c7b\" (UID: \"8f5d7261-64ac-49b8-a081-34c57e792c7b\") " Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.925220 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.925755 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.926957 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.927396 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd" (OuterVolumeSpecName: "kube-api-access-rhncd") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "kube-api-access-rhncd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.953259 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:18 crc kubenswrapper[4876]: I1215 09:20:18.961275 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory" (OuterVolumeSpecName: "inventory") pod "8f5d7261-64ac-49b8-a081-34c57e792c7b" (UID: "8f5d7261-64ac-49b8-a081-34c57e792c7b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.026900 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.027021 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.027218 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.027257 4876 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.027270 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhncd\" (UniqueName: \"kubernetes.io/projected/8f5d7261-64ac-49b8-a081-34c57e792c7b-kube-api-access-rhncd\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.027280 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f5d7261-64ac-49b8-a081-34c57e792c7b-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 
09:20:19.394234 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" event={"ID":"8f5d7261-64ac-49b8-a081-34c57e792c7b","Type":"ContainerDied","Data":"ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984"} Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.394274 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee577d75a1c357e0fe102af178e1716bc8e0ba36eeac058482ee95fb0c660984" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.394332 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-networker-ll6qb" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.482528 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-networker-9lmb2"] Dec 15 09:20:19 crc kubenswrapper[4876]: E1215 09:20:19.483435 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5d7261-64ac-49b8-a081-34c57e792c7b" containerName="install-certs-openstack-openstack-networker" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.483459 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5d7261-64ac-49b8-a081-34c57e792c7b" containerName="install-certs-openstack-openstack-networker" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.483726 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5d7261-64ac-49b8-a081-34c57e792c7b" containerName="install-certs-openstack-openstack-networker" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.484659 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.487769 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.487885 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.488756 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.496194 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-networker-9lmb2"] Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.535887 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmg7w\" (UniqueName: \"kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.535965 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.536004 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.536137 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.536178 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.637806 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.637883 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.638035 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.638087 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.638147 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmg7w\" (UniqueName: \"kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.639454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " 
pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.642860 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.642875 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.643715 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.672939 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmg7w\" (UniqueName: \"kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w\") pod \"ovn-openstack-openstack-networker-9lmb2\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:19 crc kubenswrapper[4876]: I1215 09:20:19.816897 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:20:20 crc kubenswrapper[4876]: I1215 09:20:20.384428 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-networker-9lmb2"] Dec 15 09:20:20 crc kubenswrapper[4876]: I1215 09:20:20.408307 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-9lmb2" event={"ID":"1dab852d-2b8c-4343-9bde-ea3218464e8f","Type":"ContainerStarted","Data":"dbe4407fa513af067070ebe8e8bfc2f95e94e401ff83f6ef2fa029022e29b38c"} Dec 15 09:20:21 crc kubenswrapper[4876]: I1215 09:20:21.436315 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-9lmb2" event={"ID":"1dab852d-2b8c-4343-9bde-ea3218464e8f","Type":"ContainerStarted","Data":"d4f4fa911f24de74b49631feb77a5027383d10b6d0170e4f551220edcbe54c5b"} Dec 15 09:20:23 crc kubenswrapper[4876]: I1215 09:20:23.455743 4876 generic.go:334] "Generic (PLEG): container finished" podID="0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" containerID="1e70351a7b4bfaf02f556c9382872d456ddccdbbf411cd3390b516a0bfb32db0" exitCode=0 Dec 15 09:20:23 crc kubenswrapper[4876]: I1215 09:20:23.455825 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" event={"ID":"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a","Type":"ContainerDied","Data":"1e70351a7b4bfaf02f556c9382872d456ddccdbbf411cd3390b516a0bfb32db0"} Dec 15 09:20:23 crc kubenswrapper[4876]: I1215 09:20:23.476868 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-networker-9lmb2" podStartSLOduration=3.853912473 podStartE2EDuration="4.476849527s" podCreationTimestamp="2025-12-15 09:20:19 +0000 UTC" firstStartedPulling="2025-12-15 09:20:20.391766956 +0000 UTC m=+8945.962909867" lastFinishedPulling="2025-12-15 09:20:21.01470401 +0000 UTC m=+8946.585846921" observedRunningTime="2025-12-15 09:20:21.457474443 +0000 UTC m=+8947.028617344" watchObservedRunningTime="2025-12-15 09:20:23.476849527 +0000 UTC m=+8949.047992438" Dec 15 09:20:24 crc kubenswrapper[4876]: I1215 09:20:24.987086 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.159061 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory\") pod \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.159182 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph\") pod \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.159212 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjbv2\" (UniqueName: \"kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2\") pod \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.159309 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key\") pod \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\" (UID: \"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a\") " Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.166453 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2" (OuterVolumeSpecName: "kube-api-access-gjbv2") pod "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" (UID: "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a"). InnerVolumeSpecName "kube-api-access-gjbv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.181363 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph" (OuterVolumeSpecName: "ceph") pod "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" (UID: "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.187228 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory" (OuterVolumeSpecName: "inventory") pod "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" (UID: "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.225279 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" (UID: "0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.262561 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.262597 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.262608 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjbv2\" (UniqueName: \"kubernetes.io/projected/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-kube-api-access-gjbv2\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.262617 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.478263 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" event={"ID":"0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a","Type":"ContainerDied","Data":"d7d19e43e90b02824133e86f1718df4fee737c404f390fb3a459f00f8e78a67b"} Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.478572 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7d19e43e90b02824133e86f1718df4fee737c404f390fb3a459f00f8e78a67b" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.478317 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-gzsfz" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.583335 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-kfc9n"] Dec 15 09:20:25 crc kubenswrapper[4876]: E1215 09:20:25.583803 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" containerName="run-os-openstack-openstack-cell1" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.583820 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" containerName="run-os-openstack-openstack-cell1" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.584010 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a" containerName="run-os-openstack-openstack-cell1" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.596685 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.600918 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.601317 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.615947 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-kfc9n"] Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.686896 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.687077 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.687197 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.687335 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rshcn\" (UniqueName: \"kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.794701 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.794820 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.794886 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 
09:20:25.794967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rshcn\" (UniqueName: \"kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.819413 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.819896 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.820530 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.831016 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rshcn\" (UniqueName: \"kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn\") pod \"reboot-os-openstack-openstack-cell1-kfc9n\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:25 crc kubenswrapper[4876]: I1215 09:20:25.915494 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:26 crc kubenswrapper[4876]: I1215 09:20:26.330522 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-kfc9n"] Dec 15 09:20:26 crc kubenswrapper[4876]: I1215 09:20:26.488784 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" event={"ID":"5f149f08-66bd-4f27-92f8-d2c23cce8a1e","Type":"ContainerStarted","Data":"c39bb655adb68c84652f37057246785b92adff8217e8059cd6f531bb1120bb30"} Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.323329 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.323687 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.323733 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.324635 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.324695 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15" gracePeriod=600 Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.502067 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15" exitCode=0 Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.502435 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15"} Dec 15 09:20:27 crc kubenswrapper[4876]: I1215 09:20:27.502470 4876 scope.go:117] "RemoveContainer" containerID="510cdaad82782ab2a064b41bc24e020cfeaaaf644c764fda4ee1c67954b4e1a3" Dec 15 09:20:28 crc kubenswrapper[4876]: I1215 09:20:28.515662 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14"} Dec 15 09:20:28 crc kubenswrapper[4876]: I1215 09:20:28.517729 4876 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" event={"ID":"5f149f08-66bd-4f27-92f8-d2c23cce8a1e","Type":"ContainerStarted","Data":"46d0103e9658287970b1bcd7d9354a7b21371c7481575a754698f5acf2b8993c"} Dec 15 09:20:28 crc kubenswrapper[4876]: I1215 09:20:28.560513 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" podStartSLOduration=2.999172665 podStartE2EDuration="3.560493533s" podCreationTimestamp="2025-12-15 09:20:25 +0000 UTC" firstStartedPulling="2025-12-15 09:20:26.338727472 +0000 UTC m=+8951.909870383" lastFinishedPulling="2025-12-15 09:20:26.90004835 +0000 UTC m=+8952.471191251" observedRunningTime="2025-12-15 09:20:28.553522935 +0000 UTC m=+8954.124665866" watchObservedRunningTime="2025-12-15 09:20:28.560493533 +0000 UTC m=+8954.131636444" Dec 15 09:20:43 crc kubenswrapper[4876]: I1215 09:20:43.661427 4876 generic.go:334] "Generic (PLEG): container finished" podID="5f149f08-66bd-4f27-92f8-d2c23cce8a1e" containerID="46d0103e9658287970b1bcd7d9354a7b21371c7481575a754698f5acf2b8993c" exitCode=0 Dec 15 09:20:43 crc kubenswrapper[4876]: I1215 09:20:43.661533 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" event={"ID":"5f149f08-66bd-4f27-92f8-d2c23cce8a1e","Type":"ContainerDied","Data":"46d0103e9658287970b1bcd7d9354a7b21371c7481575a754698f5acf2b8993c"} Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.255789 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.435498 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rshcn\" (UniqueName: \"kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn\") pod \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.435697 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key\") pod \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.435924 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory\") pod \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.436016 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph\") pod \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\" (UID: \"5f149f08-66bd-4f27-92f8-d2c23cce8a1e\") " Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.683564 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" event={"ID":"5f149f08-66bd-4f27-92f8-d2c23cce8a1e","Type":"ContainerDied","Data":"c39bb655adb68c84652f37057246785b92adff8217e8059cd6f531bb1120bb30"} Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.683947 4876 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c39bb655adb68c84652f37057246785b92adff8217e8059cd6f531bb1120bb30" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.683619 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-kfc9n" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.793543 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-4v4dc"] Dec 15 09:20:45 crc kubenswrapper[4876]: E1215 09:20:45.793994 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f149f08-66bd-4f27-92f8-d2c23cce8a1e" containerName="reboot-os-openstack-openstack-cell1" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.794009 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f149f08-66bd-4f27-92f8-d2c23cce8a1e" containerName="reboot-os-openstack-openstack-cell1" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.794215 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f149f08-66bd-4f27-92f8-d2c23cce8a1e" containerName="reboot-os-openstack-openstack-cell1" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.794935 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.812343 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-4v4dc"] Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.853796 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn" (OuterVolumeSpecName: "kube-api-access-rshcn") pod "5f149f08-66bd-4f27-92f8-d2c23cce8a1e" (UID: "5f149f08-66bd-4f27-92f8-d2c23cce8a1e"). InnerVolumeSpecName "kube-api-access-rshcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.862200 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph" (OuterVolumeSpecName: "ceph") pod "5f149f08-66bd-4f27-92f8-d2c23cce8a1e" (UID: "5f149f08-66bd-4f27-92f8-d2c23cce8a1e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.910581 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory" (OuterVolumeSpecName: "inventory") pod "5f149f08-66bd-4f27-92f8-d2c23cce8a1e" (UID: "5f149f08-66bd-4f27-92f8-d2c23cce8a1e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950255 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950299 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950330 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950372 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950391 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950424 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn77b\" (UniqueName: \"kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950473 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950493 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950531 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950565 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950584 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950615 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950680 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950690 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:45 crc kubenswrapper[4876]: I1215 09:20:45.950699 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rshcn\" (UniqueName: \"kubernetes.io/projected/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-kube-api-access-rshcn\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.052491 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.052967 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: 
\"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053012 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053053 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053177 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053206 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053275 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn77b\" (UniqueName: \"kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053415 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053448 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053535 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " 
pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053589 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.053640 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.071828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f149f08-66bd-4f27-92f8-d2c23cce8a1e" (UID: "5f149f08-66bd-4f27-92f8-d2c23cce8a1e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.072754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.072800 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.073419 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.074141 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.074335 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc 
kubenswrapper[4876]: I1215 09:20:46.074957 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.074978 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.075966 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.077896 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.079034 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.084014 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.110093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn77b\" (UniqueName: \"kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b\") pod \"install-certs-openstack-openstack-cell1-4v4dc\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.120718 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:20:46 crc kubenswrapper[4876]: I1215 09:20:46.159596 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f149f08-66bd-4f27-92f8-d2c23cce8a1e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:20:47 crc kubenswrapper[4876]: I1215 09:20:46.749070 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-4v4dc"] Dec 15 09:20:47 crc kubenswrapper[4876]: I1215 09:20:47.702268 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" event={"ID":"3656635c-619e-44de-8647-e7d82f850931","Type":"ContainerStarted","Data":"c818b33f98b20f50b5ab69fb349d8745a9e1f20e59870420d434bcb339d7a8b3"} Dec 15 09:20:48 crc kubenswrapper[4876]: I1215 09:20:48.720017 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" event={"ID":"3656635c-619e-44de-8647-e7d82f850931","Type":"ContainerStarted","Data":"743a22167888a5a159cb01d6febd96be99bfaed05cbf09268cf1741d1d6a6c71"} Dec 15 09:21:10 crc kubenswrapper[4876]: I1215 09:21:10.921335 4876 generic.go:334] "Generic (PLEG): container finished" podID="3656635c-619e-44de-8647-e7d82f850931" containerID="743a22167888a5a159cb01d6febd96be99bfaed05cbf09268cf1741d1d6a6c71" exitCode=0 Dec 15 09:21:10 crc kubenswrapper[4876]: I1215 09:21:10.921408 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" event={"ID":"3656635c-619e-44de-8647-e7d82f850931","Type":"ContainerDied","Data":"743a22167888a5a159cb01d6febd96be99bfaed05cbf09268cf1741d1d6a6c71"} Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.547832 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657481 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657538 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657587 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657642 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657763 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657803 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657835 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657862 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657908 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.657971 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.658075 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn77b\" (UniqueName: \"kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.658182 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle\") pod \"3656635c-619e-44de-8647-e7d82f850931\" (UID: \"3656635c-619e-44de-8647-e7d82f850931\") " Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.663708 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b" (OuterVolumeSpecName: "kube-api-access-rn77b") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "kube-api-access-rn77b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.664212 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.664291 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.665222 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.666134 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "telemetry-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.666203 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.666307 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.666640 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.666702 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.667380 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph" (OuterVolumeSpecName: "ceph") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.693011 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.697124 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory" (OuterVolumeSpecName: "inventory") pod "3656635c-619e-44de-8647-e7d82f850931" (UID: "3656635c-619e-44de-8647-e7d82f850931"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.760950 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.760996 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761010 4876 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761022 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761034 4876 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761046 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761060 4876 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761073 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761084 4876 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761098 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761125 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3656635c-619e-44de-8647-e7d82f850931-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.761136 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn77b\" (UniqueName: \"kubernetes.io/projected/3656635c-619e-44de-8647-e7d82f850931-kube-api-access-rn77b\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.944414 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" event={"ID":"3656635c-619e-44de-8647-e7d82f850931","Type":"ContainerDied","Data":"c818b33f98b20f50b5ab69fb349d8745a9e1f20e59870420d434bcb339d7a8b3"} Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.944732 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c818b33f98b20f50b5ab69fb349d8745a9e1f20e59870420d434bcb339d7a8b3" Dec 15 09:21:12 crc kubenswrapper[4876]: I1215 09:21:12.944478 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-4v4dc" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.064344 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tmvmx"] Dec 15 09:21:13 crc kubenswrapper[4876]: E1215 09:21:13.064818 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3656635c-619e-44de-8647-e7d82f850931" containerName="install-certs-openstack-openstack-cell1" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.064840 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="3656635c-619e-44de-8647-e7d82f850931" containerName="install-certs-openstack-openstack-cell1" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.065062 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="3656635c-619e-44de-8647-e7d82f850931" containerName="install-certs-openstack-openstack-cell1" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.065773 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.071026 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.071412 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.081594 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tmvmx"] Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.171056 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcsdv\" (UniqueName: \"kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.171167 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.171197 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 
09:21:13.171376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.273859 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.274032 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcsdv\" (UniqueName: \"kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.274092 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.274136 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.279735 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.282916 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.283497 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.291518 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcsdv\" (UniqueName: \"kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv\") pod \"ceph-client-openstack-openstack-cell1-tmvmx\" (UID: 
\"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.389006 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.906847 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-tmvmx"] Dec 15 09:21:13 crc kubenswrapper[4876]: I1215 09:21:13.954613 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" event={"ID":"92849ab3-8a5c-48e4-9426-7a5e49371db3","Type":"ContainerStarted","Data":"7d7b1ad0aaf3fe23802f381e775d21180c5e4ccbb8b2dc412b8d88e949d78817"} Dec 15 09:21:14 crc kubenswrapper[4876]: I1215 09:21:14.967246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" event={"ID":"92849ab3-8a5c-48e4-9426-7a5e49371db3","Type":"ContainerStarted","Data":"8d011458bc163c39eb03588eac893f8119a5c3f77880f52c55fd2b5757301d32"} Dec 15 09:21:14 crc kubenswrapper[4876]: I1215 09:21:14.985938 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" podStartSLOduration=1.492154759 podStartE2EDuration="1.985910862s" podCreationTimestamp="2025-12-15 09:21:13 +0000 UTC" firstStartedPulling="2025-12-15 09:21:13.923911075 +0000 UTC m=+8999.495053986" lastFinishedPulling="2025-12-15 09:21:14.417667178 +0000 UTC m=+8999.988810089" observedRunningTime="2025-12-15 09:21:14.980917358 +0000 UTC m=+9000.552060269" watchObservedRunningTime="2025-12-15 09:21:14.985910862 +0000 UTC m=+9000.557053783" Dec 15 09:21:21 crc kubenswrapper[4876]: I1215 09:21:21.037852 4876 generic.go:334] "Generic (PLEG): container finished" podID="92849ab3-8a5c-48e4-9426-7a5e49371db3" containerID="8d011458bc163c39eb03588eac893f8119a5c3f77880f52c55fd2b5757301d32" exitCode=0 Dec 15 09:21:21 crc kubenswrapper[4876]: I1215 09:21:21.037934 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" event={"ID":"92849ab3-8a5c-48e4-9426-7a5e49371db3","Type":"ContainerDied","Data":"8d011458bc163c39eb03588eac893f8119a5c3f77880f52c55fd2b5757301d32"} Dec 15 09:21:21 crc kubenswrapper[4876]: E1215 09:21:21.216882 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92849ab3_8a5c_48e4_9426_7a5e49371db3.slice/crio-8d011458bc163c39eb03588eac893f8119a5c3f77880f52c55fd2b5757301d32.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92849ab3_8a5c_48e4_9426_7a5e49371db3.slice/crio-conmon-8d011458bc163c39eb03588eac893f8119a5c3f77880f52c55fd2b5757301d32.scope\": RecentStats: unable to find data in memory cache]" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.538966 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.623445 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory\") pod \"92849ab3-8a5c-48e4-9426-7a5e49371db3\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.623510 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph\") pod \"92849ab3-8a5c-48e4-9426-7a5e49371db3\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.623575 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcsdv\" (UniqueName: \"kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv\") pod \"92849ab3-8a5c-48e4-9426-7a5e49371db3\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.623679 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key\") pod \"92849ab3-8a5c-48e4-9426-7a5e49371db3\" (UID: \"92849ab3-8a5c-48e4-9426-7a5e49371db3\") " Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.632790 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv" (OuterVolumeSpecName: "kube-api-access-hcsdv") pod "92849ab3-8a5c-48e4-9426-7a5e49371db3" (UID: "92849ab3-8a5c-48e4-9426-7a5e49371db3"). InnerVolumeSpecName "kube-api-access-hcsdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.634470 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph" (OuterVolumeSpecName: "ceph") pod "92849ab3-8a5c-48e4-9426-7a5e49371db3" (UID: "92849ab3-8a5c-48e4-9426-7a5e49371db3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.668320 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory" (OuterVolumeSpecName: "inventory") pod "92849ab3-8a5c-48e4-9426-7a5e49371db3" (UID: "92849ab3-8a5c-48e4-9426-7a5e49371db3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.703086 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "92849ab3-8a5c-48e4-9426-7a5e49371db3" (UID: "92849ab3-8a5c-48e4-9426-7a5e49371db3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.726244 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.726283 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.726292 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcsdv\" (UniqueName: \"kubernetes.io/projected/92849ab3-8a5c-48e4-9426-7a5e49371db3-kube-api-access-hcsdv\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:22 crc kubenswrapper[4876]: I1215 09:21:22.726303 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92849ab3-8a5c-48e4-9426-7a5e49371db3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.057830 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" event={"ID":"92849ab3-8a5c-48e4-9426-7a5e49371db3","Type":"ContainerDied","Data":"7d7b1ad0aaf3fe23802f381e775d21180c5e4ccbb8b2dc412b8d88e949d78817"} Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.058243 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d7b1ad0aaf3fe23802f381e775d21180c5e4ccbb8b2dc412b8d88e949d78817" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.057887 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-tmvmx" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.148622 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ls542"] Dec 15 09:21:23 crc kubenswrapper[4876]: E1215 09:21:23.149135 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92849ab3-8a5c-48e4-9426-7a5e49371db3" containerName="ceph-client-openstack-openstack-cell1" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.149156 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="92849ab3-8a5c-48e4-9426-7a5e49371db3" containerName="ceph-client-openstack-openstack-cell1" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.149412 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="92849ab3-8a5c-48e4-9426-7a5e49371db3" containerName="ceph-client-openstack-openstack-cell1" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.150400 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.156377 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.156779 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.162332 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ls542"] Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.236455 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvl5x\" (UniqueName: \"kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.236792 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.236911 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.237046 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.237190 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.237339 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.338717 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 
09:21:23.338832 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvl5x\" (UniqueName: \"kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.338880 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.338903 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.338954 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.338998 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.340359 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.344454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.344479 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.345288 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " 
pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.346606 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.358753 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvl5x\" (UniqueName: \"kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x\") pod \"ovn-openstack-openstack-cell1-ls542\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:23 crc kubenswrapper[4876]: I1215 09:21:23.477986 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:21:24 crc kubenswrapper[4876]: I1215 09:21:24.214708 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-ls542"] Dec 15 09:21:25 crc kubenswrapper[4876]: I1215 09:21:25.075764 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ls542" event={"ID":"4ef9be2e-a2e2-4004-b6f9-7872140a2422","Type":"ContainerStarted","Data":"280265b5e86d9491d111ce6bdab6290dd7f6227cc8207b6021f53de781131a46"} Dec 15 09:21:26 crc kubenswrapper[4876]: I1215 09:21:26.086626 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ls542" event={"ID":"4ef9be2e-a2e2-4004-b6f9-7872140a2422","Type":"ContainerStarted","Data":"21158036d667b6364e86f394e346d5dd2a176881eeaf25330e2b6314b90870fd"} Dec 15 09:21:26 crc kubenswrapper[4876]: I1215 09:21:26.105374 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-ls542" podStartSLOduration=1.896375325 podStartE2EDuration="3.10535567s" podCreationTimestamp="2025-12-15 09:21:23 +0000 UTC" firstStartedPulling="2025-12-15 09:21:24.229477061 +0000 UTC m=+9009.800619972" lastFinishedPulling="2025-12-15 09:21:25.438457406 +0000 UTC m=+9011.009600317" observedRunningTime="2025-12-15 09:21:26.104037854 +0000 UTC m=+9011.675180765" watchObservedRunningTime="2025-12-15 09:21:26.10535567 +0000 UTC m=+9011.676498581" Dec 15 09:21:49 crc kubenswrapper[4876]: I1215 09:21:49.306937 4876 generic.go:334] "Generic (PLEG): container finished" podID="1dab852d-2b8c-4343-9bde-ea3218464e8f" containerID="d4f4fa911f24de74b49631feb77a5027383d10b6d0170e4f551220edcbe54c5b" exitCode=0 Dec 15 09:21:49 crc kubenswrapper[4876]: I1215 09:21:49.307178 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-9lmb2" event={"ID":"1dab852d-2b8c-4343-9bde-ea3218464e8f","Type":"ContainerDied","Data":"d4f4fa911f24de74b49631feb77a5027383d10b6d0170e4f551220edcbe54c5b"} Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.781603 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.812190 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key\") pod \"1dab852d-2b8c-4343-9bde-ea3218464e8f\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.812260 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory\") pod \"1dab852d-2b8c-4343-9bde-ea3218464e8f\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.812360 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0\") pod \"1dab852d-2b8c-4343-9bde-ea3218464e8f\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.812382 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle\") pod \"1dab852d-2b8c-4343-9bde-ea3218464e8f\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.812595 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmg7w\" (UniqueName: \"kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w\") pod \"1dab852d-2b8c-4343-9bde-ea3218464e8f\" (UID: \"1dab852d-2b8c-4343-9bde-ea3218464e8f\") " Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.827134 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "1dab852d-2b8c-4343-9bde-ea3218464e8f" (UID: "1dab852d-2b8c-4343-9bde-ea3218464e8f"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.828461 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w" (OuterVolumeSpecName: "kube-api-access-wmg7w") pod "1dab852d-2b8c-4343-9bde-ea3218464e8f" (UID: "1dab852d-2b8c-4343-9bde-ea3218464e8f"). InnerVolumeSpecName "kube-api-access-wmg7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.847649 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "1dab852d-2b8c-4343-9bde-ea3218464e8f" (UID: "1dab852d-2b8c-4343-9bde-ea3218464e8f"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.850205 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1dab852d-2b8c-4343-9bde-ea3218464e8f" (UID: "1dab852d-2b8c-4343-9bde-ea3218464e8f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.855198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory" (OuterVolumeSpecName: "inventory") pod "1dab852d-2b8c-4343-9bde-ea3218464e8f" (UID: "1dab852d-2b8c-4343-9bde-ea3218464e8f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.916020 4876 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.916090 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.916127 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmg7w\" (UniqueName: \"kubernetes.io/projected/1dab852d-2b8c-4343-9bde-ea3218464e8f-kube-api-access-wmg7w\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.916149 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:50 crc kubenswrapper[4876]: I1215 09:21:50.916165 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1dab852d-2b8c-4343-9bde-ea3218464e8f-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.326957 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-networker-9lmb2" event={"ID":"1dab852d-2b8c-4343-9bde-ea3218464e8f","Type":"ContainerDied","Data":"dbe4407fa513af067070ebe8e8bfc2f95e94e401ff83f6ef2fa029022e29b38c"} Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.327012 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbe4407fa513af067070ebe8e8bfc2f95e94e401ff83f6ef2fa029022e29b38c" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.327022 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-networker-9lmb2" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.424933 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-t2m59"] Dec 15 09:21:51 crc kubenswrapper[4876]: E1215 09:21:51.426074 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dab852d-2b8c-4343-9bde-ea3218464e8f" containerName="ovn-openstack-openstack-networker" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.426122 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dab852d-2b8c-4343-9bde-ea3218464e8f" containerName="ovn-openstack-openstack-networker" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.426535 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dab852d-2b8c-4343-9bde-ea3218464e8f" containerName="ovn-openstack-openstack-networker" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.434962 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.437266 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.443149 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-networker-dockercfg-vrsms" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.443166 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-networker" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.443256 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.466307 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-t2m59"] Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.537946 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.538032 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.538128 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.538170 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.538200 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.538308 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qgpr\" (UniqueName: \"kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640074 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640251 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640283 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640305 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " 
pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.640375 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qgpr\" (UniqueName: \"kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.647071 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.647373 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.649326 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.651437 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.651983 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.664213 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qgpr\" (UniqueName: \"kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr\") pod \"neutron-metadata-openstack-openstack-networker-t2m59\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:51 crc kubenswrapper[4876]: I1215 09:21:51.762693 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:21:52 crc kubenswrapper[4876]: I1215 09:21:52.290453 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-networker-t2m59"] Dec 15 09:21:52 crc kubenswrapper[4876]: I1215 09:21:52.338975 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" event={"ID":"946096c8-3935-4428-ad25-7c10e755784d","Type":"ContainerStarted","Data":"d85b0e1fab78911274a6a46d7bd8b0b15714309b9e1beef81a2e6a118f1dccfb"} Dec 15 09:21:54 crc kubenswrapper[4876]: I1215 09:21:54.359253 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" event={"ID":"946096c8-3935-4428-ad25-7c10e755784d","Type":"ContainerStarted","Data":"b956bdd49781080681d5d1b5903bcce33ea72c70be9d3b2cab185fc261127f72"} Dec 15 09:21:54 crc kubenswrapper[4876]: I1215 09:21:54.392451 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" podStartSLOduration=1.976506267 podStartE2EDuration="3.392429411s" podCreationTimestamp="2025-12-15 09:21:51 +0000 UTC" firstStartedPulling="2025-12-15 09:21:52.292578516 +0000 UTC m=+9037.863721427" lastFinishedPulling="2025-12-15 09:21:53.70850166 +0000 UTC m=+9039.279644571" observedRunningTime="2025-12-15 09:21:54.378426905 +0000 UTC m=+9039.949569816" watchObservedRunningTime="2025-12-15 09:21:54.392429411 +0000 UTC m=+9039.963572332" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.061079 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.064315 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.083130 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.123325 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.123410 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.123573 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz6ck\" (UniqueName: \"kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.225176 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz6ck\" (UniqueName: \"kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.225261 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.225302 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.225854 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.225863 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.246079 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hz6ck\" (UniqueName: \"kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck\") pod \"redhat-operators-gt9qp\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.386684 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:21:55 crc kubenswrapper[4876]: I1215 09:21:55.887527 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:21:56 crc kubenswrapper[4876]: I1215 09:21:56.378921 4876 generic.go:334] "Generic (PLEG): container finished" podID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerID="d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9" exitCode=0 Dec 15 09:21:56 crc kubenswrapper[4876]: I1215 09:21:56.378986 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerDied","Data":"d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9"} Dec 15 09:21:56 crc kubenswrapper[4876]: I1215 09:21:56.379047 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerStarted","Data":"fadb5e57ac82e774e0632d2c11473dbde93e9270c367f32e5da9a85021894d0d"} Dec 15 09:21:58 crc kubenswrapper[4876]: I1215 09:21:58.401798 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerStarted","Data":"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be"} Dec 15 09:22:01 crc kubenswrapper[4876]: I1215 09:22:01.435659 4876 generic.go:334] "Generic (PLEG): container finished" podID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerID="e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be" exitCode=0 Dec 15 09:22:01 crc kubenswrapper[4876]: I1215 09:22:01.435728 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerDied","Data":"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be"} Dec 15 09:22:03 crc kubenswrapper[4876]: I1215 09:22:03.468463 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerStarted","Data":"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8"} Dec 15 09:22:03 crc kubenswrapper[4876]: I1215 09:22:03.490852 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gt9qp" podStartSLOduration=2.286360538 podStartE2EDuration="8.490826432s" podCreationTimestamp="2025-12-15 09:21:55 +0000 UTC" firstStartedPulling="2025-12-15 09:21:56.381915732 +0000 UTC m=+9041.953058643" lastFinishedPulling="2025-12-15 09:22:02.586381626 +0000 UTC m=+9048.157524537" observedRunningTime="2025-12-15 09:22:03.489433534 +0000 UTC m=+9049.060576445" watchObservedRunningTime="2025-12-15 09:22:03.490826432 +0000 UTC m=+9049.061969343" Dec 15 09:22:05 crc kubenswrapper[4876]: I1215 09:22:05.387662 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gt9qp" 
Dec 15 09:22:05 crc kubenswrapper[4876]: I1215 09:22:05.388022 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:22:06 crc kubenswrapper[4876]: I1215 09:22:06.436821 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gt9qp" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="registry-server" probeResult="failure" output=< Dec 15 09:22:06 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:22:06 crc kubenswrapper[4876]: > Dec 15 09:22:15 crc kubenswrapper[4876]: I1215 09:22:15.444592 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:22:15 crc kubenswrapper[4876]: I1215 09:22:15.500158 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:22:18 crc kubenswrapper[4876]: I1215 09:22:18.938885 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:22:18 crc kubenswrapper[4876]: I1215 09:22:18.939433 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gt9qp" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="registry-server" containerID="cri-o://6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8" gracePeriod=2 Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.450534 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.533465 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz6ck\" (UniqueName: \"kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck\") pod \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.533900 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities\") pod \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.533999 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content\") pod \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\" (UID: \"77d58525-fae9-4e3d-b5a9-44baf19eeccb\") " Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.534656 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities" (OuterVolumeSpecName: "utilities") pod "77d58525-fae9-4e3d-b5a9-44baf19eeccb" (UID: "77d58525-fae9-4e3d-b5a9-44baf19eeccb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.540055 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck" (OuterVolumeSpecName: "kube-api-access-hz6ck") pod "77d58525-fae9-4e3d-b5a9-44baf19eeccb" (UID: "77d58525-fae9-4e3d-b5a9-44baf19eeccb"). InnerVolumeSpecName "kube-api-access-hz6ck". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.618744 4876 generic.go:334] "Generic (PLEG): container finished" podID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerID="6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8" exitCode=0 Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.618813 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gt9qp" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.618797 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerDied","Data":"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8"} Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.618975 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gt9qp" event={"ID":"77d58525-fae9-4e3d-b5a9-44baf19eeccb","Type":"ContainerDied","Data":"fadb5e57ac82e774e0632d2c11473dbde93e9270c367f32e5da9a85021894d0d"} Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.619016 4876 scope.go:117] "RemoveContainer" containerID="6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.635956 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz6ck\" (UniqueName: \"kubernetes.io/projected/77d58525-fae9-4e3d-b5a9-44baf19eeccb-kube-api-access-hz6ck\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.635989 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.642712 4876 scope.go:117] "RemoveContainer" containerID="e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.651201 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77d58525-fae9-4e3d-b5a9-44baf19eeccb" (UID: "77d58525-fae9-4e3d-b5a9-44baf19eeccb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.665341 4876 scope.go:117] "RemoveContainer" containerID="d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.710256 4876 scope.go:117] "RemoveContainer" containerID="6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8" Dec 15 09:22:19 crc kubenswrapper[4876]: E1215 09:22:19.710821 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8\": container with ID starting with 6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8 not found: ID does not exist" containerID="6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.710891 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8"} err="failed to get container status \"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8\": rpc error: code = NotFound desc = could not find container \"6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8\": container with ID starting with 6c056347c55902522fc605e732c52e5187a0441fe21d620c21fb214f037c7ae8 not found: ID does not exist" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.710925 4876 scope.go:117] "RemoveContainer" containerID="e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be" Dec 15 09:22:19 crc kubenswrapper[4876]: E1215 09:22:19.711259 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be\": container with ID starting with e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be not found: ID does not exist" containerID="e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.711288 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be"} err="failed to get container status \"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be\": rpc error: code = NotFound desc = could not find container \"e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be\": container with ID starting with e0a23ad62cc5d290cea78de9e3f63e51aae4e9305868ff9d13ff38b115b291be not found: ID does not exist" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.711309 4876 scope.go:117] "RemoveContainer" containerID="d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9" Dec 15 09:22:19 crc kubenswrapper[4876]: E1215 09:22:19.711524 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9\": container with ID starting with d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9 not found: ID does not exist" containerID="d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.711546 4876 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9"} err="failed to get container status \"d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9\": rpc error: code = NotFound desc = could not find container \"d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9\": container with ID starting with d844db7a26bfd314d9c4f82caa5a4a6707377b04fff73b5fc1a1e53c98cf83b9 not found: ID does not exist" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.738291 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77d58525-fae9-4e3d-b5a9-44baf19eeccb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.955246 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:22:19 crc kubenswrapper[4876]: I1215 09:22:19.966557 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gt9qp"] Dec 15 09:22:20 crc kubenswrapper[4876]: I1215 09:22:20.718203 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" path="/var/lib/kubelet/pods/77d58525-fae9-4e3d-b5a9-44baf19eeccb/volumes" Dec 15 09:22:27 crc kubenswrapper[4876]: I1215 09:22:27.322976 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:22:27 crc kubenswrapper[4876]: I1215 09:22:27.323543 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:22:45 crc kubenswrapper[4876]: I1215 09:22:45.907042 4876 generic.go:334] "Generic (PLEG): container finished" podID="4ef9be2e-a2e2-4004-b6f9-7872140a2422" containerID="21158036d667b6364e86f394e346d5dd2a176881eeaf25330e2b6314b90870fd" exitCode=0 Dec 15 09:22:45 crc kubenswrapper[4876]: I1215 09:22:45.907280 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ls542" event={"ID":"4ef9be2e-a2e2-4004-b6f9-7872140a2422","Type":"ContainerDied","Data":"21158036d667b6364e86f394e346d5dd2a176881eeaf25330e2b6314b90870fd"} Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.430230 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.555746 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.555839 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.555894 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.555976 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.555995 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvl5x\" (UniqueName: \"kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.556119 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0\") pod \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\" (UID: \"4ef9be2e-a2e2-4004-b6f9-7872140a2422\") " Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.562284 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.563298 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph" (OuterVolumeSpecName: "ceph") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.563711 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x" (OuterVolumeSpecName: "kube-api-access-hvl5x") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "kube-api-access-hvl5x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.590412 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.591325 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory" (OuterVolumeSpecName: "inventory") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.607355 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "4ef9be2e-a2e2-4004-b6f9-7872140a2422" (UID: "4ef9be2e-a2e2-4004-b6f9-7872140a2422"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658811 4876 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658858 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658876 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658887 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658897 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4ef9be2e-a2e2-4004-b6f9-7872140a2422-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.658908 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvl5x\" (UniqueName: \"kubernetes.io/projected/4ef9be2e-a2e2-4004-b6f9-7872140a2422-kube-api-access-hvl5x\") on node \"crc\" DevicePath \"\"" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.935659 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-ls542" event={"ID":"4ef9be2e-a2e2-4004-b6f9-7872140a2422","Type":"ContainerDied","Data":"280265b5e86d9491d111ce6bdab6290dd7f6227cc8207b6021f53de781131a46"} Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 09:22:47.935718 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="280265b5e86d9491d111ce6bdab6290dd7f6227cc8207b6021f53de781131a46" Dec 15 09:22:47 crc kubenswrapper[4876]: I1215 
09:22:47.935791 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-ls542" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.041249 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-xrzd5"] Dec 15 09:22:48 crc kubenswrapper[4876]: E1215 09:22:48.058410 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="registry-server" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.058790 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="registry-server" Dec 15 09:22:48 crc kubenswrapper[4876]: E1215 09:22:48.058874 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef9be2e-a2e2-4004-b6f9-7872140a2422" containerName="ovn-openstack-openstack-cell1" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.058925 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef9be2e-a2e2-4004-b6f9-7872140a2422" containerName="ovn-openstack-openstack-cell1" Dec 15 09:22:48 crc kubenswrapper[4876]: E1215 09:22:48.059043 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="extract-utilities" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.059134 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="extract-utilities" Dec 15 09:22:48 crc kubenswrapper[4876]: E1215 09:22:48.059218 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="extract-content" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.059322 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="extract-content" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.059921 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ef9be2e-a2e2-4004-b6f9-7872140a2422" containerName="ovn-openstack-openstack-cell1" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.060004 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="77d58525-fae9-4e3d-b5a9-44baf19eeccb" containerName="registry-server" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.061325 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-xrzd5"] Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.061594 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.067905 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.068621 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.170644 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh949\" (UniqueName: \"kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.170700 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.170744 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.170838 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.171012 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.171065 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.171269 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: 
\"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273397 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273504 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273540 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273638 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273703 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh949\" (UniqueName: \"kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273748 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.273776 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.278517 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: 
\"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.278589 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.279372 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.279525 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.280234 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.286943 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.304806 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh949\" (UniqueName: \"kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949\") pod \"neutron-metadata-openstack-openstack-cell1-xrzd5\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:48 crc kubenswrapper[4876]: I1215 09:22:48.388007 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:22:49 crc kubenswrapper[4876]: I1215 09:22:49.271375 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-xrzd5"] Dec 15 09:22:49 crc kubenswrapper[4876]: I1215 09:22:49.967506 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" event={"ID":"75af2aac-a703-42f7-a933-4f2bac4af01f","Type":"ContainerStarted","Data":"0c7c7dbde56d0a5cd3f5161493d084d637afe8fdad1106105ad9ce43b5d2252c"} Dec 15 09:22:50 crc kubenswrapper[4876]: I1215 09:22:50.980091 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" event={"ID":"75af2aac-a703-42f7-a933-4f2bac4af01f","Type":"ContainerStarted","Data":"d498ef1be0826dbc8d499724e4e46e0ea0f980135ef92f60382591ec3905f310"} Dec 15 09:22:51 crc kubenswrapper[4876]: I1215 09:22:51.002444 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" podStartSLOduration=1.746241225 podStartE2EDuration="3.002426598s" podCreationTimestamp="2025-12-15 09:22:48 +0000 UTC" firstStartedPulling="2025-12-15 09:22:49.271617386 +0000 UTC m=+9094.842760297" lastFinishedPulling="2025-12-15 09:22:50.527802769 +0000 UTC m=+9096.098945670" observedRunningTime="2025-12-15 09:22:51.001917355 +0000 UTC m=+9096.573060286" watchObservedRunningTime="2025-12-15 09:22:51.002426598 +0000 UTC m=+9096.573569509" Dec 15 09:22:57 crc kubenswrapper[4876]: I1215 09:22:57.322476 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:22:57 crc kubenswrapper[4876]: I1215 09:22:57.323347 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:23:03 crc kubenswrapper[4876]: I1215 09:23:03.086467 4876 generic.go:334] "Generic (PLEG): container finished" podID="946096c8-3935-4428-ad25-7c10e755784d" containerID="b956bdd49781080681d5d1b5903bcce33ea72c70be9d3b2cab185fc261127f72" exitCode=0 Dec 15 09:23:03 crc kubenswrapper[4876]: I1215 09:23:03.086566 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" event={"ID":"946096c8-3935-4428-ad25-7c10e755784d","Type":"ContainerDied","Data":"b956bdd49781080681d5d1b5903bcce33ea72c70be9d3b2cab185fc261127f72"} Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.685814 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843083 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843203 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qgpr\" (UniqueName: \"kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843290 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843367 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843408 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.843510 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle\") pod \"946096c8-3935-4428-ad25-7c10e755784d\" (UID: \"946096c8-3935-4428-ad25-7c10e755784d\") " Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.891357 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.891408 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr" (OuterVolumeSpecName: "kube-api-access-2qgpr") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "kube-api-access-2qgpr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.925803 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.926929 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory" (OuterVolumeSpecName: "inventory") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.932931 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.945739 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.945769 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qgpr\" (UniqueName: \"kubernetes.io/projected/946096c8-3935-4428-ad25-7c10e755784d-kube-api-access-2qgpr\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.945780 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.945791 4876 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.945800 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:04 crc kubenswrapper[4876]: I1215 09:23:04.947864 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "946096c8-3935-4428-ad25-7c10e755784d" (UID: "946096c8-3935-4428-ad25-7c10e755784d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:05 crc kubenswrapper[4876]: I1215 09:23:05.047357 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/946096c8-3935-4428-ad25-7c10e755784d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:05 crc kubenswrapper[4876]: I1215 09:23:05.115202 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" event={"ID":"946096c8-3935-4428-ad25-7c10e755784d","Type":"ContainerDied","Data":"d85b0e1fab78911274a6a46d7bd8b0b15714309b9e1beef81a2e6a118f1dccfb"} Dec 15 09:23:05 crc kubenswrapper[4876]: I1215 09:23:05.115251 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d85b0e1fab78911274a6a46d7bd8b0b15714309b9e1beef81a2e6a118f1dccfb" Dec 15 09:23:05 crc kubenswrapper[4876]: I1215 09:23:05.115420 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-networker-t2m59" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.186813 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:25 crc kubenswrapper[4876]: E1215 09:23:25.187841 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="946096c8-3935-4428-ad25-7c10e755784d" containerName="neutron-metadata-openstack-openstack-networker" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.187857 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="946096c8-3935-4428-ad25-7c10e755784d" containerName="neutron-metadata-openstack-openstack-networker" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.188066 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="946096c8-3935-4428-ad25-7c10e755784d" containerName="neutron-metadata-openstack-openstack-networker" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.189641 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.199820 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.262934 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.263024 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7sx4\" (UniqueName: \"kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.263195 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.365062 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7sx4\" (UniqueName: \"kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.365310 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.365498 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.366027 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.366086 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.400972 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-r7sx4\" (UniqueName: \"kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4\") pod \"redhat-marketplace-5mp8k\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:25 crc kubenswrapper[4876]: I1215 09:23:25.517351 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:26 crc kubenswrapper[4876]: I1215 09:23:26.086137 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:26 crc kubenswrapper[4876]: I1215 09:23:26.317022 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerStarted","Data":"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92"} Dec 15 09:23:26 crc kubenswrapper[4876]: I1215 09:23:26.317069 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerStarted","Data":"8a7964ff2eb7826eda61e9c1a6a1cdc182201ca4b30a091c9b41d5053e7cea24"} Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.322230 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.322555 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.322600 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.323412 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.323480 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" gracePeriod=600 Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.331736 4876 generic.go:334] "Generic (PLEG): container finished" podID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerID="9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92" exitCode=0 Dec 15 09:23:27 crc kubenswrapper[4876]: I1215 09:23:27.331783 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" 
event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerDied","Data":"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92"} Dec 15 09:23:27 crc kubenswrapper[4876]: E1215 09:23:27.464690 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:23:28 crc kubenswrapper[4876]: I1215 09:23:28.345591 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" exitCode=0 Dec 15 09:23:28 crc kubenswrapper[4876]: I1215 09:23:28.345675 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14"} Dec 15 09:23:28 crc kubenswrapper[4876]: I1215 09:23:28.346022 4876 scope.go:117] "RemoveContainer" containerID="76bca0552147ae11fb9dc646aa45aa28e4f6dd0076be73075000c7f62adf0a15" Dec 15 09:23:28 crc kubenswrapper[4876]: I1215 09:23:28.346888 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:23:28 crc kubenswrapper[4876]: E1215 09:23:28.347265 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:23:29 crc kubenswrapper[4876]: I1215 09:23:29.364719 4876 generic.go:334] "Generic (PLEG): container finished" podID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerID="c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2" exitCode=0 Dec 15 09:23:29 crc kubenswrapper[4876]: I1215 09:23:29.365305 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerDied","Data":"c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2"} Dec 15 09:23:30 crc kubenswrapper[4876]: I1215 09:23:30.376053 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerStarted","Data":"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316"} Dec 15 09:23:30 crc kubenswrapper[4876]: I1215 09:23:30.399282 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5mp8k" podStartSLOduration=2.949040222 podStartE2EDuration="5.399265659s" podCreationTimestamp="2025-12-15 09:23:25 +0000 UTC" firstStartedPulling="2025-12-15 09:23:27.334846433 +0000 UTC m=+9132.905989344" lastFinishedPulling="2025-12-15 09:23:29.78507187 +0000 UTC m=+9135.356214781" observedRunningTime="2025-12-15 09:23:30.393280148 +0000 UTC m=+9135.964423059" 
watchObservedRunningTime="2025-12-15 09:23:30.399265659 +0000 UTC m=+9135.970408560" Dec 15 09:23:35 crc kubenswrapper[4876]: I1215 09:23:35.518512 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:35 crc kubenswrapper[4876]: I1215 09:23:35.519025 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:35 crc kubenswrapper[4876]: I1215 09:23:35.570014 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:36 crc kubenswrapper[4876]: I1215 09:23:36.521059 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:36 crc kubenswrapper[4876]: I1215 09:23:36.572361 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:38 crc kubenswrapper[4876]: I1215 09:23:38.490594 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5mp8k" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="registry-server" containerID="cri-o://493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316" gracePeriod=2 Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.018346 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.070817 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities\") pod \"e17cd583-fb9b-45a3-8883-152b10114ae0\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.071097 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7sx4\" (UniqueName: \"kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4\") pod \"e17cd583-fb9b-45a3-8883-152b10114ae0\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.071173 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content\") pod \"e17cd583-fb9b-45a3-8883-152b10114ae0\" (UID: \"e17cd583-fb9b-45a3-8883-152b10114ae0\") " Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.071915 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities" (OuterVolumeSpecName: "utilities") pod "e17cd583-fb9b-45a3-8883-152b10114ae0" (UID: "e17cd583-fb9b-45a3-8883-152b10114ae0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.076696 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4" (OuterVolumeSpecName: "kube-api-access-r7sx4") pod "e17cd583-fb9b-45a3-8883-152b10114ae0" (UID: "e17cd583-fb9b-45a3-8883-152b10114ae0"). InnerVolumeSpecName "kube-api-access-r7sx4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.095878 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e17cd583-fb9b-45a3-8883-152b10114ae0" (UID: "e17cd583-fb9b-45a3-8883-152b10114ae0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.173253 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.173287 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7sx4\" (UniqueName: \"kubernetes.io/projected/e17cd583-fb9b-45a3-8883-152b10114ae0-kube-api-access-r7sx4\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.173300 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e17cd583-fb9b-45a3-8883-152b10114ae0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.503604 4876 generic.go:334] "Generic (PLEG): container finished" podID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerID="493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316" exitCode=0 Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.503648 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerDied","Data":"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316"} Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.503675 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5mp8k" event={"ID":"e17cd583-fb9b-45a3-8883-152b10114ae0","Type":"ContainerDied","Data":"8a7964ff2eb7826eda61e9c1a6a1cdc182201ca4b30a091c9b41d5053e7cea24"} Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.503693 4876 scope.go:117] "RemoveContainer" containerID="493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.503846 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5mp8k" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.524019 4876 scope.go:117] "RemoveContainer" containerID="c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.539837 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.552562 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5mp8k"] Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.561387 4876 scope.go:117] "RemoveContainer" containerID="9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.602343 4876 scope.go:117] "RemoveContainer" containerID="493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316" Dec 15 09:23:39 crc kubenswrapper[4876]: E1215 09:23:39.603090 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316\": container with ID starting with 493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316 not found: ID does not exist" containerID="493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.603151 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316"} err="failed to get container status \"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316\": rpc error: code = NotFound desc = could not find container \"493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316\": container with ID starting with 493e02c3d422bd37f2ca3151b4344f3f3e27a72aeb68d641001d4fb4bdb62316 not found: ID does not exist" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.603176 4876 scope.go:117] "RemoveContainer" containerID="c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2" Dec 15 09:23:39 crc kubenswrapper[4876]: E1215 09:23:39.603706 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2\": container with ID starting with c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2 not found: ID does not exist" containerID="c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.603754 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2"} err="failed to get container status \"c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2\": rpc error: code = NotFound desc = could not find container \"c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2\": container with ID starting with c2942c2bff955cfd84b10bf2c18581d1d148728e6e958b63498fc8c6c8218ee2 not found: ID does not exist" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.603785 4876 scope.go:117] "RemoveContainer" containerID="9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92" Dec 15 09:23:39 crc kubenswrapper[4876]: E1215 09:23:39.604245 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92\": container with ID starting with 9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92 not found: ID does not exist" containerID="9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92" Dec 15 09:23:39 crc kubenswrapper[4876]: I1215 09:23:39.604321 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92"} err="failed to get container status \"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92\": rpc error: code = NotFound desc = could not find container \"9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92\": container with ID starting with 9dd5490eb5e033f052f55e2c3dc781ee6dbb70dcfaedf0ab8dc2759527ce8f92 not found: ID does not exist" Dec 15 09:23:40 crc kubenswrapper[4876]: I1215 09:23:40.727237 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" path="/var/lib/kubelet/pods/e17cd583-fb9b-45a3-8883-152b10114ae0/volumes" Dec 15 09:23:42 crc kubenswrapper[4876]: I1215 09:23:42.705674 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:23:42 crc kubenswrapper[4876]: E1215 09:23:42.706396 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:23:54 crc kubenswrapper[4876]: I1215 09:23:54.646593 4876 generic.go:334] "Generic (PLEG): container finished" podID="75af2aac-a703-42f7-a933-4f2bac4af01f" containerID="d498ef1be0826dbc8d499724e4e46e0ea0f980135ef92f60382591ec3905f310" exitCode=0 Dec 15 09:23:54 crc kubenswrapper[4876]: I1215 09:23:54.646682 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" event={"ID":"75af2aac-a703-42f7-a933-4f2bac4af01f","Type":"ContainerDied","Data":"d498ef1be0826dbc8d499724e4e46e0ea0f980135ef92f60382591ec3905f310"} Dec 15 09:23:54 crc kubenswrapper[4876]: I1215 09:23:54.714046 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:23:54 crc kubenswrapper[4876]: E1215 09:23:54.714417 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.218835 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.381973 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382037 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382079 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382126 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382162 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382250 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.382270 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh949\" (UniqueName: \"kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949\") pod \"75af2aac-a703-42f7-a933-4f2bac4af01f\" (UID: \"75af2aac-a703-42f7-a933-4f2bac4af01f\") " Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.388507 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.388529 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph" (OuterVolumeSpecName: "ceph") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.389317 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949" (OuterVolumeSpecName: "kube-api-access-fh949") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "kube-api-access-fh949". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.416042 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.420229 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.420700 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory" (OuterVolumeSpecName: "inventory") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.429239 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "75af2aac-a703-42f7-a933-4f2bac4af01f" (UID: "75af2aac-a703-42f7-a933-4f2bac4af01f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486859 4876 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486911 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486923 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486933 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486944 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486952 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75af2aac-a703-42f7-a933-4f2bac4af01f-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.486962 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh949\" (UniqueName: \"kubernetes.io/projected/75af2aac-a703-42f7-a933-4f2bac4af01f-kube-api-access-fh949\") on node \"crc\" DevicePath \"\"" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.667601 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" event={"ID":"75af2aac-a703-42f7-a933-4f2bac4af01f","Type":"ContainerDied","Data":"0c7c7dbde56d0a5cd3f5161493d084d637afe8fdad1106105ad9ce43b5d2252c"} Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.667653 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c7c7dbde56d0a5cd3f5161493d084d637afe8fdad1106105ad9ce43b5d2252c" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.667815 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-xrzd5" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817185 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-bxvgp"] Dec 15 09:23:56 crc kubenswrapper[4876]: E1215 09:23:56.817619 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="registry-server" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817643 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="registry-server" Dec 15 09:23:56 crc kubenswrapper[4876]: E1215 09:23:56.817660 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="extract-utilities" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817667 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="extract-utilities" Dec 15 09:23:56 crc kubenswrapper[4876]: E1215 09:23:56.817677 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75af2aac-a703-42f7-a933-4f2bac4af01f" containerName="neutron-metadata-openstack-openstack-cell1" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817684 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="75af2aac-a703-42f7-a933-4f2bac4af01f" containerName="neutron-metadata-openstack-openstack-cell1" Dec 15 09:23:56 crc kubenswrapper[4876]: E1215 09:23:56.817693 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="extract-content" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817699 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="extract-content" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817931 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="75af2aac-a703-42f7-a933-4f2bac4af01f" containerName="neutron-metadata-openstack-openstack-cell1" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.817970 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17cd583-fb9b-45a3-8883-152b10114ae0" containerName="registry-server" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.818776 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.822029 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.824510 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.824619 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.824855 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.826080 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.839945 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-bxvgp"] Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.996994 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.997174 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f4gq\" (UniqueName: \"kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.997225 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.997277 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.997353 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:56 crc kubenswrapper[4876]: I1215 09:23:56.997384 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory\") pod 
\"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100461 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100561 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100656 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100796 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f4gq\" (UniqueName: \"kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100852 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.100886 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.107164 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.107790 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.108217 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.111585 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.112376 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.121269 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f4gq\" (UniqueName: \"kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq\") pod \"libvirt-openstack-openstack-cell1-bxvgp\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.139702 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.661529 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-bxvgp"] Dec 15 09:23:57 crc kubenswrapper[4876]: I1215 09:23:57.680867 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" event={"ID":"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134","Type":"ContainerStarted","Data":"603f521d26fab29fd67e493764f1cfede8f1b70b2430e1645e1f521c8ef4e243"} Dec 15 09:23:58 crc kubenswrapper[4876]: I1215 09:23:58.698246 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" event={"ID":"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134","Type":"ContainerStarted","Data":"9769927639a1836a34206efae575dfe5147412e29c9be953f921524302561422"} Dec 15 09:23:58 crc kubenswrapper[4876]: I1215 09:23:58.728500 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" podStartSLOduration=2.229605292 podStartE2EDuration="2.728472943s" podCreationTimestamp="2025-12-15 09:23:56 +0000 UTC" firstStartedPulling="2025-12-15 09:23:57.668405877 +0000 UTC m=+9163.239548788" lastFinishedPulling="2025-12-15 09:23:58.167273488 +0000 UTC m=+9163.738416439" observedRunningTime="2025-12-15 09:23:58.718500875 +0000 UTC m=+9164.289643806" watchObservedRunningTime="2025-12-15 09:23:58.728472943 +0000 UTC m=+9164.299615874" Dec 15 09:24:08 crc kubenswrapper[4876]: I1215 09:24:08.705610 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:24:08 crc kubenswrapper[4876]: E1215 09:24:08.706428 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:24:19 crc kubenswrapper[4876]: I1215 09:24:19.705702 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:24:19 crc kubenswrapper[4876]: E1215 09:24:19.706519 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:24:34 crc kubenswrapper[4876]: I1215 09:24:34.713330 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:24:34 crc kubenswrapper[4876]: E1215 09:24:34.714138 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:24:47 crc kubenswrapper[4876]: I1215 09:24:47.705992 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:24:47 crc kubenswrapper[4876]: E1215 09:24:47.706890 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:24:59 crc kubenswrapper[4876]: I1215 09:24:59.706282 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:24:59 crc kubenswrapper[4876]: E1215 09:24:59.707184 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:25:12 crc kubenswrapper[4876]: I1215 09:25:12.706366 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:25:12 crc kubenswrapper[4876]: E1215 09:25:12.707621 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:25:25 crc kubenswrapper[4876]: I1215 09:25:25.705816 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:25:25 crc kubenswrapper[4876]: E1215 09:25:25.706768 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.310449 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.314910 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.331057 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.377985 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4px8j\" (UniqueName: \"kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.378129 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.378173 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.480023 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4px8j\" (UniqueName: \"kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.480157 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.480205 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.480772 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.480774 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.502324 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4px8j\" (UniqueName: \"kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j\") pod \"certified-operators-65v7x\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.643319 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:38 crc kubenswrapper[4876]: I1215 09:25:38.709392 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:25:38 crc kubenswrapper[4876]: E1215 09:25:38.709656 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:25:39 crc kubenswrapper[4876]: I1215 09:25:39.245984 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:39 crc kubenswrapper[4876]: I1215 09:25:39.796194 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerStarted","Data":"c98e296b4d9319c173331d751ec7df9c4a9ed65e27af16d80e0a7ed731d13578"} Dec 15 09:25:40 crc kubenswrapper[4876]: I1215 09:25:40.807904 4876 generic.go:334] "Generic (PLEG): container finished" podID="c2520b0b-2087-4b4f-9344-a0583905f589" containerID="001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62" exitCode=0 Dec 15 09:25:40 crc kubenswrapper[4876]: I1215 09:25:40.807989 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerDied","Data":"001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62"} Dec 15 09:25:40 crc kubenswrapper[4876]: I1215 09:25:40.811086 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:25:42 crc kubenswrapper[4876]: I1215 09:25:42.831211 4876 
generic.go:334] "Generic (PLEG): container finished" podID="c2520b0b-2087-4b4f-9344-a0583905f589" containerID="f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1" exitCode=0 Dec 15 09:25:42 crc kubenswrapper[4876]: I1215 09:25:42.831285 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerDied","Data":"f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1"} Dec 15 09:25:43 crc kubenswrapper[4876]: I1215 09:25:43.844818 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerStarted","Data":"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e"} Dec 15 09:25:43 crc kubenswrapper[4876]: I1215 09:25:43.870523 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-65v7x" podStartSLOduration=3.393433542 podStartE2EDuration="5.87049751s" podCreationTimestamp="2025-12-15 09:25:38 +0000 UTC" firstStartedPulling="2025-12-15 09:25:40.81074896 +0000 UTC m=+9266.381891871" lastFinishedPulling="2025-12-15 09:25:43.287812928 +0000 UTC m=+9268.858955839" observedRunningTime="2025-12-15 09:25:43.865010843 +0000 UTC m=+9269.436153774" watchObservedRunningTime="2025-12-15 09:25:43.87049751 +0000 UTC m=+9269.441640421" Dec 15 09:25:48 crc kubenswrapper[4876]: I1215 09:25:48.643678 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:48 crc kubenswrapper[4876]: I1215 09:25:48.644435 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:48 crc kubenswrapper[4876]: I1215 09:25:48.701757 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:48 crc kubenswrapper[4876]: I1215 09:25:48.954932 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:49 crc kubenswrapper[4876]: I1215 09:25:49.019445 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:50 crc kubenswrapper[4876]: I1215 09:25:50.705866 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:25:50 crc kubenswrapper[4876]: E1215 09:25:50.706451 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:25:50 crc kubenswrapper[4876]: I1215 09:25:50.916795 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-65v7x" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="registry-server" containerID="cri-o://a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e" gracePeriod=2 Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.454839 4876 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.628198 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content\") pod \"c2520b0b-2087-4b4f-9344-a0583905f589\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.628558 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4px8j\" (UniqueName: \"kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j\") pod \"c2520b0b-2087-4b4f-9344-a0583905f589\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.628617 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities\") pod \"c2520b0b-2087-4b4f-9344-a0583905f589\" (UID: \"c2520b0b-2087-4b4f-9344-a0583905f589\") " Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.629999 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities" (OuterVolumeSpecName: "utilities") pod "c2520b0b-2087-4b4f-9344-a0583905f589" (UID: "c2520b0b-2087-4b4f-9344-a0583905f589"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.635758 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j" (OuterVolumeSpecName: "kube-api-access-4px8j") pod "c2520b0b-2087-4b4f-9344-a0583905f589" (UID: "c2520b0b-2087-4b4f-9344-a0583905f589"). InnerVolumeSpecName "kube-api-access-4px8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.697318 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2520b0b-2087-4b4f-9344-a0583905f589" (UID: "c2520b0b-2087-4b4f-9344-a0583905f589"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.731652 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.731733 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4px8j\" (UniqueName: \"kubernetes.io/projected/c2520b0b-2087-4b4f-9344-a0583905f589-kube-api-access-4px8j\") on node \"crc\" DevicePath \"\"" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.731751 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2520b0b-2087-4b4f-9344-a0583905f589-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.927389 4876 generic.go:334] "Generic (PLEG): container finished" podID="c2520b0b-2087-4b4f-9344-a0583905f589" containerID="a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e" exitCode=0 Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.927442 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerDied","Data":"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e"} Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.927471 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-65v7x" event={"ID":"c2520b0b-2087-4b4f-9344-a0583905f589","Type":"ContainerDied","Data":"c98e296b4d9319c173331d751ec7df9c4a9ed65e27af16d80e0a7ed731d13578"} Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.927468 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-65v7x" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.927488 4876 scope.go:117] "RemoveContainer" containerID="a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.952783 4876 scope.go:117] "RemoveContainer" containerID="f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1" Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.972684 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:51 crc kubenswrapper[4876]: I1215 09:25:51.982761 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-65v7x"] Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.483953 4876 scope.go:117] "RemoveContainer" containerID="001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.530805 4876 scope.go:117] "RemoveContainer" containerID="a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e" Dec 15 09:25:52 crc kubenswrapper[4876]: E1215 09:25:52.531585 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e\": container with ID starting with a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e not found: ID does not exist" containerID="a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.531640 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e"} err="failed to get container status \"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e\": rpc error: code = NotFound desc = could not find container \"a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e\": container with ID starting with a44bc2669ddf34139c38773f280ba6a7d6ecb1029bdaefdfd5903bcffa52ca6e not found: ID does not exist" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.531676 4876 scope.go:117] "RemoveContainer" containerID="f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1" Dec 15 09:25:52 crc kubenswrapper[4876]: E1215 09:25:52.532159 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1\": container with ID starting with f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1 not found: ID does not exist" containerID="f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.532195 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1"} err="failed to get container status \"f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1\": rpc error: code = NotFound desc = could not find container \"f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1\": container with ID starting with f1ae383d25e7912ec16a2cb84c91843897d2126cf446b306c0815890a7d87dd1 not found: ID does not exist" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.532215 4876 scope.go:117] "RemoveContainer" 
containerID="001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62" Dec 15 09:25:52 crc kubenswrapper[4876]: E1215 09:25:52.532434 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62\": container with ID starting with 001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62 not found: ID does not exist" containerID="001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.532479 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62"} err="failed to get container status \"001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62\": rpc error: code = NotFound desc = could not find container \"001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62\": container with ID starting with 001cc53bd6a92a6a8e725141fa8f0bad8a95e5437640d685de95324d0c337e62 not found: ID does not exist" Dec 15 09:25:52 crc kubenswrapper[4876]: I1215 09:25:52.717697 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" path="/var/lib/kubelet/pods/c2520b0b-2087-4b4f-9344-a0583905f589/volumes" Dec 15 09:26:02 crc kubenswrapper[4876]: I1215 09:26:02.705413 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:26:02 crc kubenswrapper[4876]: E1215 09:26:02.706376 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:26:14 crc kubenswrapper[4876]: I1215 09:26:14.714958 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:26:14 crc kubenswrapper[4876]: E1215 09:26:14.715909 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:26:25 crc kubenswrapper[4876]: I1215 09:26:25.706194 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:26:25 crc kubenswrapper[4876]: E1215 09:26:25.706951 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:26:38 crc kubenswrapper[4876]: I1215 09:26:38.706147 4876 scope.go:117] "RemoveContainer" 
containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:26:38 crc kubenswrapper[4876]: E1215 09:26:38.707005 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:26:51 crc kubenswrapper[4876]: I1215 09:26:51.705994 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:26:51 crc kubenswrapper[4876]: E1215 09:26:51.706917 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:27:02 crc kubenswrapper[4876]: I1215 09:27:02.715612 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:27:02 crc kubenswrapper[4876]: E1215 09:27:02.716674 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:27:15 crc kubenswrapper[4876]: I1215 09:27:15.705377 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:27:15 crc kubenswrapper[4876]: E1215 09:27:15.707787 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:27:26 crc kubenswrapper[4876]: I1215 09:27:26.706303 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:27:26 crc kubenswrapper[4876]: E1215 09:27:26.708193 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:27:38 crc kubenswrapper[4876]: I1215 09:27:38.705849 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:27:38 crc kubenswrapper[4876]: E1215 09:27:38.706753 4876 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:27:49 crc kubenswrapper[4876]: I1215 09:27:49.707304 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:27:49 crc kubenswrapper[4876]: E1215 09:27:49.708190 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:28:00 crc kubenswrapper[4876]: I1215 09:28:00.706241 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:28:00 crc kubenswrapper[4876]: E1215 09:28:00.708381 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:28:15 crc kubenswrapper[4876]: I1215 09:28:15.705748 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:28:15 crc kubenswrapper[4876]: E1215 09:28:15.706620 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:28:30 crc kubenswrapper[4876]: I1215 09:28:30.706191 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:28:31 crc kubenswrapper[4876]: I1215 09:28:31.593631 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3"} Dec 15 09:29:01 crc kubenswrapper[4876]: I1215 09:29:01.888848 4876 generic.go:334] "Generic (PLEG): container finished" podID="9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" containerID="9769927639a1836a34206efae575dfe5147412e29c9be953f921524302561422" exitCode=0 Dec 15 09:29:01 crc kubenswrapper[4876]: I1215 09:29:01.888937 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" event={"ID":"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134","Type":"ContainerDied","Data":"9769927639a1836a34206efae575dfe5147412e29c9be953f921524302561422"} Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 
09:29:03.337174 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.442674 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.442843 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.442924 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.442969 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.443047 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f4gq\" (UniqueName: \"kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.443093 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle\") pod \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\" (UID: \"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134\") " Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.448709 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph" (OuterVolumeSpecName: "ceph") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.448804 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq" (OuterVolumeSpecName: "kube-api-access-4f4gq") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "kube-api-access-4f4gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.449123 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.471158 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.473025 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory" (OuterVolumeSpecName: "inventory") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.475269 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" (UID: "9440ef3b-7ac3-4e2d-b34b-fd11e44b9134"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.545831 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.546139 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.546150 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.546159 4876 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.546170 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f4gq\" (UniqueName: \"kubernetes.io/projected/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-kube-api-access-4f4gq\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.546179 4876 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9440ef3b-7ac3-4e2d-b34b-fd11e44b9134-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.914690 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" event={"ID":"9440ef3b-7ac3-4e2d-b34b-fd11e44b9134","Type":"ContainerDied","Data":"603f521d26fab29fd67e493764f1cfede8f1b70b2430e1645e1f521c8ef4e243"} Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.914735 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="603f521d26fab29fd67e493764f1cfede8f1b70b2430e1645e1f521c8ef4e243" Dec 15 09:29:03 crc kubenswrapper[4876]: I1215 09:29:03.914766 4876 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-bxvgp" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.102996 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-gxkq7"] Dec 15 09:29:04 crc kubenswrapper[4876]: E1215 09:29:04.103470 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="extract-content" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103491 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="extract-content" Dec 15 09:29:04 crc kubenswrapper[4876]: E1215 09:29:04.103524 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="extract-utilities" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103530 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="extract-utilities" Dec 15 09:29:04 crc kubenswrapper[4876]: E1215 09:29:04.103542 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="registry-server" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103549 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="registry-server" Dec 15 09:29:04 crc kubenswrapper[4876]: E1215 09:29:04.103555 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" containerName="libvirt-openstack-openstack-cell1" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103563 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" containerName="libvirt-openstack-openstack-cell1" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103749 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2520b0b-2087-4b4f-9344-a0583905f589" containerName="registry-server" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.103774 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9440ef3b-7ac3-4e2d-b34b-fd11e44b9134" containerName="libvirt-openstack-openstack-cell1" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.104537 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.106658 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.106769 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.107737 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.107993 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.108180 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.108365 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.108554 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.121272 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-gxkq7"] Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.158811 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.158920 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.158944 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159061 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159145 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key\") pod 
\"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159201 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159225 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159277 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg4d6\" (UniqueName: \"kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159337 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159376 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.159401 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.259996 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260074 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0\") pod 
\"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260113 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260143 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg4d6\" (UniqueName: \"kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260169 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260187 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260206 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260238 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260303 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260322 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.260369 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.262268 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.262587 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.265865 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.266728 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.267547 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.267722 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.268027 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.269334 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.270754 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.271275 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.277674 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg4d6\" (UniqueName: \"kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6\") pod \"nova-cell1-openstack-openstack-cell1-gxkq7\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.426729 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:29:04 crc kubenswrapper[4876]: I1215 09:29:04.976201 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-gxkq7"] Dec 15 09:29:04 crc kubenswrapper[4876]: W1215 09:29:04.979089 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37dbdd47_81c0_4e73_b82c_7e8ceb930cc3.slice/crio-927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821 WatchSource:0}: Error finding container 927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821: Status 404 returned error can't find the container with id 927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821 Dec 15 09:29:05 crc kubenswrapper[4876]: I1215 09:29:05.949911 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" event={"ID":"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3","Type":"ContainerStarted","Data":"3904868851cb490a94473dee7f9a22c118a9cc42698e8c0b1f0e1fde4cd563ae"} Dec 15 09:29:05 crc kubenswrapper[4876]: I1215 09:29:05.950285 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" event={"ID":"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3","Type":"ContainerStarted","Data":"927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821"} Dec 15 09:29:05 crc kubenswrapper[4876]: I1215 09:29:05.978815 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" podStartSLOduration=1.52180817 podStartE2EDuration="1.978794956s" podCreationTimestamp="2025-12-15 09:29:04 +0000 UTC" firstStartedPulling="2025-12-15 09:29:04.981431274 +0000 UTC m=+9470.552574185" 
lastFinishedPulling="2025-12-15 09:29:05.43841806 +0000 UTC m=+9471.009560971" observedRunningTime="2025-12-15 09:29:05.97821239 +0000 UTC m=+9471.549355301" watchObservedRunningTime="2025-12-15 09:29:05.978794956 +0000 UTC m=+9471.549937877" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.152178 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm"] Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.154564 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.157016 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.157502 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.164343 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm"] Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.198625 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvnk6\" (UniqueName: \"kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.199561 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.199764 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.301919 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.302016 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.302073 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-fvnk6\" (UniqueName: \"kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.303602 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.316355 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.319351 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvnk6\" (UniqueName: \"kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6\") pod \"collect-profiles-29429850-whvpm\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.485216 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:00 crc kubenswrapper[4876]: I1215 09:30:00.988239 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm"] Dec 15 09:30:01 crc kubenswrapper[4876]: I1215 09:30:01.511544 4876 generic.go:334] "Generic (PLEG): container finished" podID="5059b4e9-2c92-4e83-8154-17f66d795ed9" containerID="ab8e24d6b876ea0ad2ff9f024061dda3c87ccfc90bf04f4b47e482898fa89dee" exitCode=0 Dec 15 09:30:01 crc kubenswrapper[4876]: I1215 09:30:01.511587 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" event={"ID":"5059b4e9-2c92-4e83-8154-17f66d795ed9","Type":"ContainerDied","Data":"ab8e24d6b876ea0ad2ff9f024061dda3c87ccfc90bf04f4b47e482898fa89dee"} Dec 15 09:30:01 crc kubenswrapper[4876]: I1215 09:30:01.511817 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" event={"ID":"5059b4e9-2c92-4e83-8154-17f66d795ed9","Type":"ContainerStarted","Data":"05061dbb19047e2702f0ca28ad8a382ee2587aa1d1f68dd270b29b0d66b4e5dc"} Dec 15 09:30:02 crc kubenswrapper[4876]: I1215 09:30:02.924376 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.064329 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvnk6\" (UniqueName: \"kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6\") pod \"5059b4e9-2c92-4e83-8154-17f66d795ed9\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.064495 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume\") pod \"5059b4e9-2c92-4e83-8154-17f66d795ed9\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.064518 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume\") pod \"5059b4e9-2c92-4e83-8154-17f66d795ed9\" (UID: \"5059b4e9-2c92-4e83-8154-17f66d795ed9\") " Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.065348 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume" (OuterVolumeSpecName: "config-volume") pod "5059b4e9-2c92-4e83-8154-17f66d795ed9" (UID: "5059b4e9-2c92-4e83-8154-17f66d795ed9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.070256 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6" (OuterVolumeSpecName: "kube-api-access-fvnk6") pod "5059b4e9-2c92-4e83-8154-17f66d795ed9" (UID: "5059b4e9-2c92-4e83-8154-17f66d795ed9"). InnerVolumeSpecName "kube-api-access-fvnk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.070597 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5059b4e9-2c92-4e83-8154-17f66d795ed9" (UID: "5059b4e9-2c92-4e83-8154-17f66d795ed9"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.166636 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvnk6\" (UniqueName: \"kubernetes.io/projected/5059b4e9-2c92-4e83-8154-17f66d795ed9-kube-api-access-fvnk6\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.166675 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5059b4e9-2c92-4e83-8154-17f66d795ed9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.166688 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5059b4e9-2c92-4e83-8154-17f66d795ed9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.531861 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" event={"ID":"5059b4e9-2c92-4e83-8154-17f66d795ed9","Type":"ContainerDied","Data":"05061dbb19047e2702f0ca28ad8a382ee2587aa1d1f68dd270b29b0d66b4e5dc"} Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.532187 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05061dbb19047e2702f0ca28ad8a382ee2587aa1d1f68dd270b29b0d66b4e5dc" Dec 15 09:30:03 crc kubenswrapper[4876]: I1215 09:30:03.532242 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm" Dec 15 09:30:04 crc kubenswrapper[4876]: I1215 09:30:04.003477 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms"] Dec 15 09:30:04 crc kubenswrapper[4876]: I1215 09:30:04.014091 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429805-rqgms"] Dec 15 09:30:04 crc kubenswrapper[4876]: I1215 09:30:04.739972 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="098b7466-0828-4b42-855c-9e4095061276" path="/var/lib/kubelet/pods/098b7466-0828-4b42-855c-9e4095061276/volumes" Dec 15 09:30:31 crc kubenswrapper[4876]: I1215 09:30:31.675650 4876 scope.go:117] "RemoveContainer" containerID="6d2ce8d6a0d6d5c60dc97123a54f65af108248163b966c31d4a1a06febea33a2" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.873320 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:42 crc kubenswrapper[4876]: E1215 09:30:42.874350 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5059b4e9-2c92-4e83-8154-17f66d795ed9" containerName="collect-profiles" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.874363 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="5059b4e9-2c92-4e83-8154-17f66d795ed9" containerName="collect-profiles" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.874555 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="5059b4e9-2c92-4e83-8154-17f66d795ed9" containerName="collect-profiles" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.876080 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.909779 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.915938 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9csm4\" (UniqueName: \"kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.916125 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:42 crc kubenswrapper[4876]: I1215 09:30:42.916152 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.017926 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.018051 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9csm4\" (UniqueName: \"kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.018243 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.019526 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.021366 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.041845 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9csm4\" (UniqueName: \"kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4\") pod \"community-operators-zhb7w\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.215745 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.840824 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:43 crc kubenswrapper[4876]: I1215 09:30:43.922301 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerStarted","Data":"08b9abd0c3157842a947e88b21eea604f676cd9e708e499927b9267202ca26bc"} Dec 15 09:30:44 crc kubenswrapper[4876]: I1215 09:30:44.934721 4876 generic.go:334] "Generic (PLEG): container finished" podID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerID="ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b" exitCode=0 Dec 15 09:30:44 crc kubenswrapper[4876]: I1215 09:30:44.934985 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerDied","Data":"ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b"} Dec 15 09:30:44 crc kubenswrapper[4876]: I1215 09:30:44.944401 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:30:45 crc kubenswrapper[4876]: I1215 09:30:45.947819 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerStarted","Data":"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd"} Dec 15 09:30:46 crc kubenswrapper[4876]: I1215 09:30:46.961914 4876 generic.go:334] "Generic (PLEG): container finished" podID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerID="4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd" exitCode=0 Dec 15 09:30:46 crc kubenswrapper[4876]: I1215 09:30:46.962256 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerDied","Data":"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd"} Dec 15 09:30:47 crc kubenswrapper[4876]: I1215 09:30:47.971621 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerStarted","Data":"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464"} Dec 15 09:30:47 crc kubenswrapper[4876]: I1215 09:30:47.997297 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zhb7w" podStartSLOduration=3.372998403 podStartE2EDuration="5.997275515s" podCreationTimestamp="2025-12-15 09:30:42 +0000 UTC" firstStartedPulling="2025-12-15 09:30:44.937493454 +0000 UTC m=+9570.508636375" lastFinishedPulling="2025-12-15 09:30:47.561770566 +0000 UTC m=+9573.132913487" observedRunningTime="2025-12-15 09:30:47.989285701 +0000 UTC m=+9573.560428612" watchObservedRunningTime="2025-12-15 
09:30:47.997275515 +0000 UTC m=+9573.568418436" Dec 15 09:30:53 crc kubenswrapper[4876]: I1215 09:30:53.216856 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:53 crc kubenswrapper[4876]: I1215 09:30:53.217703 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:53 crc kubenswrapper[4876]: I1215 09:30:53.265607 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:54 crc kubenswrapper[4876]: I1215 09:30:54.070082 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:54 crc kubenswrapper[4876]: I1215 09:30:54.123173 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.041435 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zhb7w" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="registry-server" containerID="cri-o://702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464" gracePeriod=2 Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.548702 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.636052 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities\") pod \"7c184290-7e3c-405e-88ac-eb5d23da4a10\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.636120 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content\") pod \"7c184290-7e3c-405e-88ac-eb5d23da4a10\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.636247 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9csm4\" (UniqueName: \"kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4\") pod \"7c184290-7e3c-405e-88ac-eb5d23da4a10\" (UID: \"7c184290-7e3c-405e-88ac-eb5d23da4a10\") " Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.639215 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities" (OuterVolumeSpecName: "utilities") pod "7c184290-7e3c-405e-88ac-eb5d23da4a10" (UID: "7c184290-7e3c-405e-88ac-eb5d23da4a10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.641828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4" (OuterVolumeSpecName: "kube-api-access-9csm4") pod "7c184290-7e3c-405e-88ac-eb5d23da4a10" (UID: "7c184290-7e3c-405e-88ac-eb5d23da4a10"). InnerVolumeSpecName "kube-api-access-9csm4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.692651 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c184290-7e3c-405e-88ac-eb5d23da4a10" (UID: "7c184290-7e3c-405e-88ac-eb5d23da4a10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.738472 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.738526 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c184290-7e3c-405e-88ac-eb5d23da4a10-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:56 crc kubenswrapper[4876]: I1215 09:30:56.738543 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9csm4\" (UniqueName: \"kubernetes.io/projected/7c184290-7e3c-405e-88ac-eb5d23da4a10-kube-api-access-9csm4\") on node \"crc\" DevicePath \"\"" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.052233 4876 generic.go:334] "Generic (PLEG): container finished" podID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerID="702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464" exitCode=0 Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.052278 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerDied","Data":"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464"} Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.052295 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zhb7w" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.052307 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zhb7w" event={"ID":"7c184290-7e3c-405e-88ac-eb5d23da4a10","Type":"ContainerDied","Data":"08b9abd0c3157842a947e88b21eea604f676cd9e708e499927b9267202ca26bc"} Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.052326 4876 scope.go:117] "RemoveContainer" containerID="702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.075635 4876 scope.go:117] "RemoveContainer" containerID="4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.085930 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.099178 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zhb7w"] Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.119407 4876 scope.go:117] "RemoveContainer" containerID="ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.171475 4876 scope.go:117] "RemoveContainer" containerID="702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464" Dec 15 09:30:57 crc kubenswrapper[4876]: E1215 09:30:57.171954 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464\": container with ID starting with 702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464 not found: ID does not exist" containerID="702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.172000 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464"} err="failed to get container status \"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464\": rpc error: code = NotFound desc = could not find container \"702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464\": container with ID starting with 702b6ddadf6339aadeb2871e71478b781876bfc737054ccf8fd9a43703817464 not found: ID does not exist" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.172032 4876 scope.go:117] "RemoveContainer" containerID="4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd" Dec 15 09:30:57 crc kubenswrapper[4876]: E1215 09:30:57.172356 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd\": container with ID starting with 4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd not found: ID does not exist" containerID="4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.172416 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd"} err="failed to get container status \"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd\": rpc error: code = NotFound desc = could not find 
container \"4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd\": container with ID starting with 4359d3d5abc8f888de487109046b2f26d9db35cabc8694727dbb4e6f44b6ffcd not found: ID does not exist" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.172448 4876 scope.go:117] "RemoveContainer" containerID="ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b" Dec 15 09:30:57 crc kubenswrapper[4876]: E1215 09:30:57.172707 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b\": container with ID starting with ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b not found: ID does not exist" containerID="ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.172736 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b"} err="failed to get container status \"ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b\": rpc error: code = NotFound desc = could not find container \"ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b\": container with ID starting with ccbc40c512d6592a93c1de0396dc2b4e06109c4383994fa48585ae13a951350b not found: ID does not exist" Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.323252 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:30:57 crc kubenswrapper[4876]: I1215 09:30:57.323314 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:30:58 crc kubenswrapper[4876]: I1215 09:30:58.717219 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" path="/var/lib/kubelet/pods/7c184290-7e3c-405e-88ac-eb5d23da4a10/volumes" Dec 15 09:31:27 crc kubenswrapper[4876]: I1215 09:31:27.324741 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:31:27 crc kubenswrapper[4876]: I1215 09:31:27.325379 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.323278 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 
09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.323971 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.324026 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.324855 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.324935 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3" gracePeriod=600 Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.685023 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3" exitCode=0 Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.685123 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3"} Dec 15 09:31:57 crc kubenswrapper[4876]: I1215 09:31:57.685375 4876 scope.go:117] "RemoveContainer" containerID="5faf5f0c7b3f4de70ed6b79843a014ab22004fd398fc658b7fa1df17e219be14" Dec 15 09:31:58 crc kubenswrapper[4876]: I1215 09:31:58.695891 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba"} Dec 15 09:32:42 crc kubenswrapper[4876]: I1215 09:32:42.112556 4876 generic.go:334] "Generic (PLEG): container finished" podID="37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" containerID="3904868851cb490a94473dee7f9a22c118a9cc42698e8c0b1f0e1fde4cd563ae" exitCode=0 Dec 15 09:32:42 crc kubenswrapper[4876]: I1215 09:32:42.113054 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" event={"ID":"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3","Type":"ContainerDied","Data":"3904868851cb490a94473dee7f9a22c118a9cc42698e8c0b1f0e1fde4cd563ae"} Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.590415 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660692 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660747 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg4d6\" (UniqueName: \"kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660864 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660896 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660913 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.660949 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.661022 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.661127 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.661184 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.661233 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: 
\"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.661251 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0\") pod \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\" (UID: \"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3\") " Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.668122 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6" (OuterVolumeSpecName: "kube-api-access-qg4d6") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "kube-api-access-qg4d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.673713 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph" (OuterVolumeSpecName: "ceph") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.687510 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.695234 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.695768 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.695795 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory" (OuterVolumeSpecName: "inventory") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.696893 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.704920 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.709779 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.711321 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.712403 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" (UID: "37dbdd47-81c0-4e73-b82c-7e8ceb930cc3"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.763907 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.763957 4876 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.763969 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.763980 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg4d6\" (UniqueName: \"kubernetes.io/projected/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-kube-api-access-qg4d6\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.763990 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764053 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764065 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764080 4876 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764089 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764116 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:43 crc kubenswrapper[4876]: I1215 09:32:43.764132 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/37dbdd47-81c0-4e73-b82c-7e8ceb930cc3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.134497 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" event={"ID":"37dbdd47-81c0-4e73-b82c-7e8ceb930cc3","Type":"ContainerDied","Data":"927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821"} Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.134538 4876 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="927ba1349be8b7cc7296bebad15bc25de895b6857477055157255ad1f0f38821" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.134590 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-gxkq7" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.307159 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nb9mg"] Dec 15 09:32:44 crc kubenswrapper[4876]: E1215 09:32:44.307731 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="extract-utilities" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.307757 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="extract-utilities" Dec 15 09:32:44 crc kubenswrapper[4876]: E1215 09:32:44.307778 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="extract-content" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.307787 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="extract-content" Dec 15 09:32:44 crc kubenswrapper[4876]: E1215 09:32:44.307822 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="registry-server" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.307832 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="registry-server" Dec 15 09:32:44 crc kubenswrapper[4876]: E1215 09:32:44.307867 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" containerName="nova-cell1-openstack-openstack-cell1" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.307876 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" containerName="nova-cell1-openstack-openstack-cell1" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.308128 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c184290-7e3c-405e-88ac-eb5d23da4a10" containerName="registry-server" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.308175 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="37dbdd47-81c0-4e73-b82c-7e8ceb930cc3" containerName="nova-cell1-openstack-openstack-cell1" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.310202 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.312817 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.313405 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.313455 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.313601 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.313745 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.320135 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nb9mg"] Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376345 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376586 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376696 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376778 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376901 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.376987 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.377032 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.377073 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqmbr\" (UniqueName: \"kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.479587 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.479978 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480036 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqmbr\" (UniqueName: \"kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480152 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480236 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480311 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key\") pod 
\"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480359 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.480441 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.485295 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.485320 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.485755 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.486842 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.487328 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.490443 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 
crc kubenswrapper[4876]: I1215 09:32:44.498024 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqmbr\" (UniqueName: \"kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.503456 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nb9mg\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:44 crc kubenswrapper[4876]: I1215 09:32:44.631511 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:32:45 crc kubenswrapper[4876]: I1215 09:32:45.265531 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nb9mg"] Dec 15 09:32:46 crc kubenswrapper[4876]: I1215 09:32:46.159091 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" event={"ID":"bad1d359-4c34-4059-9eb0-f2f77d5bf834","Type":"ContainerStarted","Data":"ae5934cf9f17ba36be2633278617ae511c1924bb26d332b975fedebf34e7bc02"} Dec 15 09:32:46 crc kubenswrapper[4876]: I1215 09:32:46.159737 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" event={"ID":"bad1d359-4c34-4059-9eb0-f2f77d5bf834","Type":"ContainerStarted","Data":"d2325725f9a6d78573522434a596101aa506487e6843d8037cdff01c955c70d5"} Dec 15 09:32:46 crc kubenswrapper[4876]: I1215 09:32:46.186811 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" podStartSLOduration=1.674510856 podStartE2EDuration="2.186785822s" podCreationTimestamp="2025-12-15 09:32:44 +0000 UTC" firstStartedPulling="2025-12-15 09:32:45.28609592 +0000 UTC m=+9690.857238841" lastFinishedPulling="2025-12-15 09:32:45.798370896 +0000 UTC m=+9691.369513807" observedRunningTime="2025-12-15 09:32:46.180924765 +0000 UTC m=+9691.752067686" watchObservedRunningTime="2025-12-15 09:32:46.186785822 +0000 UTC m=+9691.757928733" Dec 15 09:33:57 crc kubenswrapper[4876]: I1215 09:33:57.322266 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:33:57 crc kubenswrapper[4876]: I1215 09:33:57.322872 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.268518 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.271884 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.277270 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.277340 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77hnn\" (UniqueName: \"kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.277529 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.285374 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.379181 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77hnn\" (UniqueName: \"kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.379333 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.379440 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.379937 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.380064 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.398758 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-77hnn\" (UniqueName: \"kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn\") pod \"redhat-marketplace-vfw7w\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.471905 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.474366 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.480809 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.480849 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.480936 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx7zc\" (UniqueName: \"kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.488382 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.582669 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.582729 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.582841 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx7zc\" (UniqueName: \"kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.583795 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " 
pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.583827 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.609634 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.610189 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx7zc\" (UniqueName: \"kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc\") pod \"redhat-operators-9nr99\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:00 crc kubenswrapper[4876]: I1215 09:34:00.799585 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.207125 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.398627 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.895282 4876 generic.go:334] "Generic (PLEG): container finished" podID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerID="af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7" exitCode=0 Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.895381 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerDied","Data":"af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7"} Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.895585 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerStarted","Data":"14d2332d09c304af082bf33fd4a1dfae0432b1adba0ec86932807dcb88ef73e2"} Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.897588 4876 generic.go:334] "Generic (PLEG): container finished" podID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerID="5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29" exitCode=0 Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.897618 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerDied","Data":"5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29"} Dec 15 09:34:01 crc kubenswrapper[4876]: I1215 09:34:01.897641 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerStarted","Data":"80e5fac1ce199fc86b39263d827778540e9c022b4bf75c669f37ac86f1db025b"} Dec 15 09:34:03 crc kubenswrapper[4876]: I1215 09:34:03.924204 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" 
event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerStarted","Data":"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420"} Dec 15 09:34:03 crc kubenswrapper[4876]: I1215 09:34:03.926668 4876 generic.go:334] "Generic (PLEG): container finished" podID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerID="7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5" exitCode=0 Dec 15 09:34:03 crc kubenswrapper[4876]: I1215 09:34:03.926720 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerDied","Data":"7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5"} Dec 15 09:34:05 crc kubenswrapper[4876]: I1215 09:34:05.951866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerStarted","Data":"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0"} Dec 15 09:34:05 crc kubenswrapper[4876]: I1215 09:34:05.991167 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vfw7w" podStartSLOduration=3.217867664 podStartE2EDuration="5.991061829s" podCreationTimestamp="2025-12-15 09:34:00 +0000 UTC" firstStartedPulling="2025-12-15 09:34:01.896946083 +0000 UTC m=+9767.468088994" lastFinishedPulling="2025-12-15 09:34:04.670140238 +0000 UTC m=+9770.241283159" observedRunningTime="2025-12-15 09:34:05.974125465 +0000 UTC m=+9771.545268376" watchObservedRunningTime="2025-12-15 09:34:05.991061829 +0000 UTC m=+9771.562204740" Dec 15 09:34:06 crc kubenswrapper[4876]: I1215 09:34:06.966914 4876 generic.go:334] "Generic (PLEG): container finished" podID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerID="4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420" exitCode=0 Dec 15 09:34:06 crc kubenswrapper[4876]: I1215 09:34:06.967006 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerDied","Data":"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420"} Dec 15 09:34:07 crc kubenswrapper[4876]: I1215 09:34:07.980456 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerStarted","Data":"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365"} Dec 15 09:34:08 crc kubenswrapper[4876]: I1215 09:34:08.000166 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9nr99" podStartSLOduration=2.506236622 podStartE2EDuration="8.000140314s" podCreationTimestamp="2025-12-15 09:34:00 +0000 UTC" firstStartedPulling="2025-12-15 09:34:01.899325847 +0000 UTC m=+9767.470468758" lastFinishedPulling="2025-12-15 09:34:07.393229539 +0000 UTC m=+9772.964372450" observedRunningTime="2025-12-15 09:34:07.996588039 +0000 UTC m=+9773.567730950" watchObservedRunningTime="2025-12-15 09:34:08.000140314 +0000 UTC m=+9773.571283225" Dec 15 09:34:10 crc kubenswrapper[4876]: I1215 09:34:10.610627 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:10 crc kubenswrapper[4876]: I1215 09:34:10.611067 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:10 crc kubenswrapper[4876]: I1215 09:34:10.669012 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:10 crc kubenswrapper[4876]: I1215 09:34:10.800600 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:10 crc kubenswrapper[4876]: I1215 09:34:10.800648 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:11 crc kubenswrapper[4876]: I1215 09:34:11.073897 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:11 crc kubenswrapper[4876]: I1215 09:34:11.661376 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:11 crc kubenswrapper[4876]: I1215 09:34:11.851247 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9nr99" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="registry-server" probeResult="failure" output=< Dec 15 09:34:11 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:34:11 crc kubenswrapper[4876]: > Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.044082 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vfw7w" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="registry-server" containerID="cri-o://4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0" gracePeriod=2 Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.580323 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.658417 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content\") pod \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.658593 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities\") pod \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.659323 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities" (OuterVolumeSpecName: "utilities") pod "986d5526-05e9-4da5-a9c7-fdfba020bbf9" (UID: "986d5526-05e9-4da5-a9c7-fdfba020bbf9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.659442 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77hnn\" (UniqueName: \"kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn\") pod \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\" (UID: \"986d5526-05e9-4da5-a9c7-fdfba020bbf9\") " Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.661339 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.667376 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn" (OuterVolumeSpecName: "kube-api-access-77hnn") pod "986d5526-05e9-4da5-a9c7-fdfba020bbf9" (UID: "986d5526-05e9-4da5-a9c7-fdfba020bbf9"). InnerVolumeSpecName "kube-api-access-77hnn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.682393 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "986d5526-05e9-4da5-a9c7-fdfba020bbf9" (UID: "986d5526-05e9-4da5-a9c7-fdfba020bbf9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.763365 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77hnn\" (UniqueName: \"kubernetes.io/projected/986d5526-05e9-4da5-a9c7-fdfba020bbf9-kube-api-access-77hnn\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:13 crc kubenswrapper[4876]: I1215 09:34:13.763401 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/986d5526-05e9-4da5-a9c7-fdfba020bbf9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.058009 4876 generic.go:334] "Generic (PLEG): container finished" podID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerID="4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0" exitCode=0 Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.058073 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vfw7w" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.058085 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerDied","Data":"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0"} Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.058215 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vfw7w" event={"ID":"986d5526-05e9-4da5-a9c7-fdfba020bbf9","Type":"ContainerDied","Data":"14d2332d09c304af082bf33fd4a1dfae0432b1adba0ec86932807dcb88ef73e2"} Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.058247 4876 scope.go:117] "RemoveContainer" containerID="4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.100030 4876 scope.go:117] "RemoveContainer" containerID="7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.117315 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.236800 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vfw7w"] Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.267384 4876 scope.go:117] "RemoveContainer" containerID="af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.322337 4876 scope.go:117] "RemoveContainer" containerID="4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0" Dec 15 09:34:14 crc kubenswrapper[4876]: E1215 09:34:14.322812 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0\": container with ID starting with 4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0 not found: ID does not exist" containerID="4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.322850 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0"} err="failed to get container status \"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0\": rpc error: code = NotFound desc = could not find container \"4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0\": container with ID starting with 4a06d494373b7dfb782bf210e5c2cedd28b194c964d50f9b42fac414cd87b6b0 not found: ID does not exist" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.322878 4876 scope.go:117] "RemoveContainer" containerID="7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5" Dec 15 09:34:14 crc kubenswrapper[4876]: E1215 09:34:14.323180 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5\": container with ID starting with 7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5 not found: ID does not exist" containerID="7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.323216 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5"} err="failed to get container status \"7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5\": rpc error: code = NotFound desc = could not find container \"7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5\": container with ID starting with 7f3b06de36750f4f8577f4d57ad5ba4682c7ccb84352663b301df9bb3e7827a5 not found: ID does not exist" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.323234 4876 scope.go:117] "RemoveContainer" containerID="af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7" Dec 15 09:34:14 crc kubenswrapper[4876]: E1215 09:34:14.323478 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7\": container with ID starting with af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7 not found: ID does not exist" containerID="af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.323505 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7"} err="failed to get container status \"af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7\": rpc error: code = NotFound desc = could not find container \"af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7\": container with ID starting with af576e4335f988289bb9c3843ab8aa18f46797a43a78f45ebaaee8edde6686b7 not found: ID does not exist" Dec 15 09:34:14 crc kubenswrapper[4876]: I1215 09:34:14.726627 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" path="/var/lib/kubelet/pods/986d5526-05e9-4da5-a9c7-fdfba020bbf9/volumes" Dec 15 09:34:20 crc kubenswrapper[4876]: I1215 09:34:20.853591 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:20 crc kubenswrapper[4876]: I1215 09:34:20.907926 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.336987 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.337964 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9nr99" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="registry-server" containerID="cri-o://ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365" gracePeriod=2 Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.875952 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.977905 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx7zc\" (UniqueName: \"kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc\") pod \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.978098 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities\") pod \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.978256 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content\") pod \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\" (UID: \"110b0c5b-367a-4478-8e6f-0dc9a071e03a\") " Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.979496 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities" (OuterVolumeSpecName: "utilities") pod "110b0c5b-367a-4478-8e6f-0dc9a071e03a" (UID: "110b0c5b-367a-4478-8e6f-0dc9a071e03a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:34:24 crc kubenswrapper[4876]: I1215 09:34:24.998742 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc" (OuterVolumeSpecName: "kube-api-access-hx7zc") pod "110b0c5b-367a-4478-8e6f-0dc9a071e03a" (UID: "110b0c5b-367a-4478-8e6f-0dc9a071e03a"). InnerVolumeSpecName "kube-api-access-hx7zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.080005 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.080036 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx7zc\" (UniqueName: \"kubernetes.io/projected/110b0c5b-367a-4478-8e6f-0dc9a071e03a-kube-api-access-hx7zc\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.101070 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "110b0c5b-367a-4478-8e6f-0dc9a071e03a" (UID: "110b0c5b-367a-4478-8e6f-0dc9a071e03a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.181565 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/110b0c5b-367a-4478-8e6f-0dc9a071e03a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.188192 4876 generic.go:334] "Generic (PLEG): container finished" podID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerID="ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365" exitCode=0 Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.188232 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerDied","Data":"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365"} Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.188266 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9nr99" event={"ID":"110b0c5b-367a-4478-8e6f-0dc9a071e03a","Type":"ContainerDied","Data":"80e5fac1ce199fc86b39263d827778540e9c022b4bf75c669f37ac86f1db025b"} Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.188275 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9nr99" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.188284 4876 scope.go:117] "RemoveContainer" containerID="ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.224072 4876 scope.go:117] "RemoveContainer" containerID="4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.228703 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.237547 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9nr99"] Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.250271 4876 scope.go:117] "RemoveContainer" containerID="5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.297030 4876 scope.go:117] "RemoveContainer" containerID="ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365" Dec 15 09:34:25 crc kubenswrapper[4876]: E1215 09:34:25.297944 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365\": container with ID starting with ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365 not found: ID does not exist" containerID="ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.297974 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365"} err="failed to get container status \"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365\": rpc error: code = NotFound desc = could not find container \"ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365\": container with ID starting with ce6ab607ed2045b05091fb3f8d59c7aefb78926984dd254d6c051f369e6c8365 not found: ID does not exist" Dec 15 09:34:25 crc 
kubenswrapper[4876]: I1215 09:34:25.298001 4876 scope.go:117] "RemoveContainer" containerID="4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420" Dec 15 09:34:25 crc kubenswrapper[4876]: E1215 09:34:25.299654 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420\": container with ID starting with 4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420 not found: ID does not exist" containerID="4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.299781 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420"} err="failed to get container status \"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420\": rpc error: code = NotFound desc = could not find container \"4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420\": container with ID starting with 4992e9c74a7faa2f618f5df23165b6af0213fd64b1e362acb62dc3e4f3882420 not found: ID does not exist" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.299850 4876 scope.go:117] "RemoveContainer" containerID="5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29" Dec 15 09:34:25 crc kubenswrapper[4876]: E1215 09:34:25.300397 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29\": container with ID starting with 5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29 not found: ID does not exist" containerID="5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29" Dec 15 09:34:25 crc kubenswrapper[4876]: I1215 09:34:25.300497 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29"} err="failed to get container status \"5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29\": rpc error: code = NotFound desc = could not find container \"5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29\": container with ID starting with 5325b0595d31dd377ac1092763941b5b643fca42317d6c2c0c8f25821737dc29 not found: ID does not exist" Dec 15 09:34:26 crc kubenswrapper[4876]: I1215 09:34:26.717600 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" path="/var/lib/kubelet/pods/110b0c5b-367a-4478-8e6f-0dc9a071e03a/volumes" Dec 15 09:34:27 crc kubenswrapper[4876]: I1215 09:34:27.322591 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:34:27 crc kubenswrapper[4876]: I1215 09:34:27.322965 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.323348 4876 patch_prober.go:28] interesting 
pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.324549 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.324614 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.325425 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.325493 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" gracePeriod=600 Dec 15 09:34:57 crc kubenswrapper[4876]: E1215 09:34:57.458246 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.557910 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" exitCode=0 Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.557960 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba"} Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.557991 4876 scope.go:117] "RemoveContainer" containerID="0d32f59ca1317b802ef07bcee10b3142637a4f83c159ca3f197305e9095c6be3" Dec 15 09:34:57 crc kubenswrapper[4876]: I1215 09:34:57.559238 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:34:57 crc kubenswrapper[4876]: E1215 09:34:57.559686 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:35:08 crc kubenswrapper[4876]: I1215 09:35:08.707158 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:35:08 crc kubenswrapper[4876]: E1215 09:35:08.708285 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:35:22 crc kubenswrapper[4876]: I1215 09:35:22.705906 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:35:22 crc kubenswrapper[4876]: E1215 09:35:22.706821 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:35:36 crc kubenswrapper[4876]: I1215 09:35:36.705754 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:35:36 crc kubenswrapper[4876]: E1215 09:35:36.706606 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:35:50 crc kubenswrapper[4876]: I1215 09:35:50.705465 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:35:50 crc kubenswrapper[4876]: E1215 09:35:50.706347 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:36:02 crc kubenswrapper[4876]: I1215 09:36:02.705679 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:36:02 crc kubenswrapper[4876]: E1215 09:36:02.706582 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:36:13 crc kubenswrapper[4876]: I1215 09:36:13.706264 4876 
scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:36:13 crc kubenswrapper[4876]: E1215 09:36:13.708835 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.817981 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819128 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="extract-content" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819152 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="extract-content" Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819185 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="extract-content" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819194 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="extract-content" Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819216 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819223 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819233 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="extract-utilities" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819241 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="extract-utilities" Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819269 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="extract-utilities" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819277 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" containerName="extract-utilities" Dec 15 09:36:18 crc kubenswrapper[4876]: E1215 09:36:18.819292 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819298 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819544 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="986d5526-05e9-4da5-a9c7-fdfba020bbf9" containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.819574 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="110b0c5b-367a-4478-8e6f-0dc9a071e03a" 
containerName="registry-server" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.825003 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.841372 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.914295 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.914393 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:18 crc kubenswrapper[4876]: I1215 09:36:18.914444 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv992\" (UniqueName: \"kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.016311 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv992\" (UniqueName: \"kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.016502 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.016547 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.017014 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.017247 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " 
pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.037955 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv992\" (UniqueName: \"kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992\") pod \"certified-operators-c4t2h\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.146904 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:19 crc kubenswrapper[4876]: I1215 09:36:19.748468 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:20 crc kubenswrapper[4876]: I1215 09:36:20.356312 4876 generic.go:334] "Generic (PLEG): container finished" podID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerID="d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7" exitCode=0 Dec 15 09:36:20 crc kubenswrapper[4876]: I1215 09:36:20.356372 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerDied","Data":"d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7"} Dec 15 09:36:20 crc kubenswrapper[4876]: I1215 09:36:20.356409 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerStarted","Data":"b5d458f10ee50f0b6502c89cfde4adaef6387ad251aa55eb04dfa6af239c5b2e"} Dec 15 09:36:20 crc kubenswrapper[4876]: I1215 09:36:20.358496 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:36:21 crc kubenswrapper[4876]: I1215 09:36:21.372027 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerStarted","Data":"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6"} Dec 15 09:36:22 crc kubenswrapper[4876]: I1215 09:36:22.385696 4876 generic.go:334] "Generic (PLEG): container finished" podID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerID="5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6" exitCode=0 Dec 15 09:36:22 crc kubenswrapper[4876]: I1215 09:36:22.385848 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerDied","Data":"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6"} Dec 15 09:36:23 crc kubenswrapper[4876]: I1215 09:36:23.396762 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerStarted","Data":"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7"} Dec 15 09:36:23 crc kubenswrapper[4876]: I1215 09:36:23.429813 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c4t2h" podStartSLOduration=2.820602546 podStartE2EDuration="5.429760902s" podCreationTimestamp="2025-12-15 09:36:18 +0000 UTC" firstStartedPulling="2025-12-15 09:36:20.358265779 +0000 UTC m=+9905.929408690" lastFinishedPulling="2025-12-15 09:36:22.967424135 
+0000 UTC m=+9908.538567046" observedRunningTime="2025-12-15 09:36:23.417869284 +0000 UTC m=+9908.989012215" watchObservedRunningTime="2025-12-15 09:36:23.429760902 +0000 UTC m=+9909.000903823" Dec 15 09:36:25 crc kubenswrapper[4876]: I1215 09:36:25.706426 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:36:25 crc kubenswrapper[4876]: E1215 09:36:25.707490 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:36:29 crc kubenswrapper[4876]: I1215 09:36:29.147239 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:29 crc kubenswrapper[4876]: I1215 09:36:29.147865 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:29 crc kubenswrapper[4876]: I1215 09:36:29.204972 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:29 crc kubenswrapper[4876]: I1215 09:36:29.510058 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:29 crc kubenswrapper[4876]: I1215 09:36:29.571682 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:31 crc kubenswrapper[4876]: I1215 09:36:31.476862 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c4t2h" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="registry-server" containerID="cri-o://7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7" gracePeriod=2 Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.008651 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.123564 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv992\" (UniqueName: \"kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992\") pod \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.125244 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content\") pod \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.136471 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities\") pod \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\" (UID: \"bb8c43a3-0851-4d55-8d3d-b8e960c92117\") " Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.142424 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities" (OuterVolumeSpecName: "utilities") pod "bb8c43a3-0851-4d55-8d3d-b8e960c92117" (UID: "bb8c43a3-0851-4d55-8d3d-b8e960c92117"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.145344 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992" (OuterVolumeSpecName: "kube-api-access-dv992") pod "bb8c43a3-0851-4d55-8d3d-b8e960c92117" (UID: "bb8c43a3-0851-4d55-8d3d-b8e960c92117"). InnerVolumeSpecName "kube-api-access-dv992". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.196680 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb8c43a3-0851-4d55-8d3d-b8e960c92117" (UID: "bb8c43a3-0851-4d55-8d3d-b8e960c92117"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.239837 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv992\" (UniqueName: \"kubernetes.io/projected/bb8c43a3-0851-4d55-8d3d-b8e960c92117-kube-api-access-dv992\") on node \"crc\" DevicePath \"\"" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.239868 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.239877 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb8c43a3-0851-4d55-8d3d-b8e960c92117-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.492961 4876 generic.go:334] "Generic (PLEG): container finished" podID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerID="7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7" exitCode=0 Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.493005 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerDied","Data":"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7"} Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.493031 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c4t2h" event={"ID":"bb8c43a3-0851-4d55-8d3d-b8e960c92117","Type":"ContainerDied","Data":"b5d458f10ee50f0b6502c89cfde4adaef6387ad251aa55eb04dfa6af239c5b2e"} Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.493044 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c4t2h" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.493055 4876 scope.go:117] "RemoveContainer" containerID="7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.516608 4876 scope.go:117] "RemoveContainer" containerID="5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.537438 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.551797 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c4t2h"] Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.552008 4876 scope.go:117] "RemoveContainer" containerID="d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.611065 4876 scope.go:117] "RemoveContainer" containerID="7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7" Dec 15 09:36:32 crc kubenswrapper[4876]: E1215 09:36:32.611752 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7\": container with ID starting with 7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7 not found: ID does not exist" containerID="7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.611791 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7"} err="failed to get container status \"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7\": rpc error: code = NotFound desc = could not find container \"7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7\": container with ID starting with 7de69c5cab3c3ca867e6c60b7fcd3806cc9d68b14fb13609c6363d44ce1653a7 not found: ID does not exist" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.611817 4876 scope.go:117] "RemoveContainer" containerID="5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6" Dec 15 09:36:32 crc kubenswrapper[4876]: E1215 09:36:32.612430 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6\": container with ID starting with 5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6 not found: ID does not exist" containerID="5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.612483 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6"} err="failed to get container status \"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6\": rpc error: code = NotFound desc = could not find container \"5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6\": container with ID starting with 5728c15ce3311d22264402c4fe93a66442055450220a4fe8582636af777613e6 not found: ID does not exist" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.612513 4876 scope.go:117] "RemoveContainer" 
containerID="d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7" Dec 15 09:36:32 crc kubenswrapper[4876]: E1215 09:36:32.612909 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7\": container with ID starting with d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7 not found: ID does not exist" containerID="d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.612939 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7"} err="failed to get container status \"d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7\": rpc error: code = NotFound desc = could not find container \"d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7\": container with ID starting with d7af70ae3920dee7f01b5c82ac3fa6b212bddb74a4585786196bf3e9b75420d7 not found: ID does not exist" Dec 15 09:36:32 crc kubenswrapper[4876]: I1215 09:36:32.718467 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" path="/var/lib/kubelet/pods/bb8c43a3-0851-4d55-8d3d-b8e960c92117/volumes" Dec 15 09:36:39 crc kubenswrapper[4876]: I1215 09:36:39.705989 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:36:39 crc kubenswrapper[4876]: E1215 09:36:39.707998 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:36:52 crc kubenswrapper[4876]: I1215 09:36:52.706483 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:36:52 crc kubenswrapper[4876]: E1215 09:36:52.707334 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:37:07 crc kubenswrapper[4876]: I1215 09:37:07.706086 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:37:07 crc kubenswrapper[4876]: E1215 09:37:07.706881 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:37:18 crc kubenswrapper[4876]: I1215 09:37:18.705408 4876 scope.go:117] "RemoveContainer" 
containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:37:18 crc kubenswrapper[4876]: E1215 09:37:18.706157 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:37:32 crc kubenswrapper[4876]: I1215 09:37:32.706699 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:37:32 crc kubenswrapper[4876]: E1215 09:37:32.707568 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:37:44 crc kubenswrapper[4876]: I1215 09:37:44.713800 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:37:44 crc kubenswrapper[4876]: E1215 09:37:44.714927 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:37:55 crc kubenswrapper[4876]: I1215 09:37:55.705347 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:37:55 crc kubenswrapper[4876]: E1215 09:37:55.706201 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:38:10 crc kubenswrapper[4876]: I1215 09:38:10.705934 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:38:10 crc kubenswrapper[4876]: E1215 09:38:10.706881 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:38:24 crc kubenswrapper[4876]: I1215 09:38:24.719600 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:38:24 crc kubenswrapper[4876]: E1215 09:38:24.720380 4876 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:38:38 crc kubenswrapper[4876]: I1215 09:38:38.706218 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:38:38 crc kubenswrapper[4876]: E1215 09:38:38.707061 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:38:52 crc kubenswrapper[4876]: I1215 09:38:52.707366 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:38:52 crc kubenswrapper[4876]: E1215 09:38:52.708795 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:39:07 crc kubenswrapper[4876]: I1215 09:39:07.706221 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:39:07 crc kubenswrapper[4876]: E1215 09:39:07.707084 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:39:21 crc kubenswrapper[4876]: I1215 09:39:21.705352 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:39:21 crc kubenswrapper[4876]: E1215 09:39:21.706161 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:39:27 crc kubenswrapper[4876]: I1215 09:39:27.335278 4876 generic.go:334] "Generic (PLEG): container finished" podID="bad1d359-4c34-4059-9eb0-f2f77d5bf834" containerID="ae5934cf9f17ba36be2633278617ae511c1924bb26d332b975fedebf34e7bc02" exitCode=0 Dec 15 09:39:27 crc kubenswrapper[4876]: I1215 09:39:27.335365 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" 
event={"ID":"bad1d359-4c34-4059-9eb0-f2f77d5bf834","Type":"ContainerDied","Data":"ae5934cf9f17ba36be2633278617ae511c1924bb26d332b975fedebf34e7bc02"} Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.784884 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.952523 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqmbr\" (UniqueName: \"kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.952615 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.952692 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.952771 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.952810 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.953085 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.953220 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.953320 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2\") pod \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\" (UID: \"bad1d359-4c34-4059-9eb0-f2f77d5bf834\") " Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.962419 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") 
pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.962499 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr" (OuterVolumeSpecName: "kube-api-access-nqmbr") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "kube-api-access-nqmbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.962459 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph" (OuterVolumeSpecName: "ceph") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.981242 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.985440 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.987538 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.989094 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:28 crc kubenswrapper[4876]: I1215 09:39:28.990578 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory" (OuterVolumeSpecName: "inventory") pod "bad1d359-4c34-4059-9eb0-f2f77d5bf834" (UID: "bad1d359-4c34-4059-9eb0-f2f77d5bf834"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057142 4876 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057189 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqmbr\" (UniqueName: \"kubernetes.io/projected/bad1d359-4c34-4059-9eb0-f2f77d5bf834-kube-api-access-nqmbr\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057204 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057216 4876 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057229 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057243 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057254 4876 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.057268 4876 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad1d359-4c34-4059-9eb0-f2f77d5bf834-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.355427 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" event={"ID":"bad1d359-4c34-4059-9eb0-f2f77d5bf834","Type":"ContainerDied","Data":"d2325725f9a6d78573522434a596101aa506487e6843d8037cdff01c955c70d5"} Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.355744 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2325725f9a6d78573522434a596101aa506487e6843d8037cdff01c955c70d5" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.355479 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nb9mg" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.507217 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-78w4n"] Dec 15 09:39:29 crc kubenswrapper[4876]: E1215 09:39:29.507706 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="extract-content" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.507722 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="extract-content" Dec 15 09:39:29 crc kubenswrapper[4876]: E1215 09:39:29.507743 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad1d359-4c34-4059-9eb0-f2f77d5bf834" containerName="telemetry-openstack-openstack-cell1" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.507749 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad1d359-4c34-4059-9eb0-f2f77d5bf834" containerName="telemetry-openstack-openstack-cell1" Dec 15 09:39:29 crc kubenswrapper[4876]: E1215 09:39:29.507787 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="registry-server" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.507793 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="registry-server" Dec 15 09:39:29 crc kubenswrapper[4876]: E1215 09:39:29.507804 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="extract-utilities" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.507810 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="extract-utilities" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.508023 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad1d359-4c34-4059-9eb0-f2f77d5bf834" containerName="telemetry-openstack-openstack-cell1" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.508043 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb8c43a3-0851-4d55-8d3d-b8e960c92117" containerName="registry-server" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.508754 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.512792 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.518203 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.518356 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.518440 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.518557 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.521946 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-78w4n"] Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566053 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566351 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566475 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566559 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566697 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.566855 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-l6h9b\" (UniqueName: \"kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.668221 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.669350 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6h9b\" (UniqueName: \"kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.669571 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.669671 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.669751 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.669774 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.673263 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.673761 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.674219 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.675845 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.676443 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.686728 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6h9b\" (UniqueName: \"kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b\") pod \"neutron-sriov-openstack-openstack-cell1-78w4n\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:29 crc kubenswrapper[4876]: I1215 09:39:29.826453 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:39:30 crc kubenswrapper[4876]: I1215 09:39:30.342033 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-78w4n"] Dec 15 09:39:30 crc kubenswrapper[4876]: I1215 09:39:30.366683 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" event={"ID":"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23","Type":"ContainerStarted","Data":"a1e79a71e9eb6b3bbdb76f1987c93a6bc7ed1891bd424d06eb415d1d7dd0f03f"} Dec 15 09:39:31 crc kubenswrapper[4876]: I1215 09:39:31.377531 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" event={"ID":"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23","Type":"ContainerStarted","Data":"a716d7126f9a4eba5c4722d4493db4cdce683293223e3c7463dd3a581aca7b2c"} Dec 15 09:39:31 crc kubenswrapper[4876]: I1215 09:39:31.397719 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" podStartSLOduration=1.7218588110000002 podStartE2EDuration="2.397668883s" podCreationTimestamp="2025-12-15 09:39:29 +0000 UTC" firstStartedPulling="2025-12-15 09:39:30.352592129 +0000 UTC m=+10095.923735030" lastFinishedPulling="2025-12-15 09:39:31.028402191 +0000 UTC m=+10096.599545102" observedRunningTime="2025-12-15 09:39:31.392955257 +0000 UTC m=+10096.964098188" watchObservedRunningTime="2025-12-15 09:39:31.397668883 +0000 UTC m=+10096.968811804" Dec 15 09:39:33 crc kubenswrapper[4876]: I1215 09:39:33.705575 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:39:33 crc kubenswrapper[4876]: E1215 09:39:33.706681 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:39:47 crc kubenswrapper[4876]: I1215 09:39:47.705924 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:39:47 crc kubenswrapper[4876]: E1215 09:39:47.706738 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:39:58 crc kubenswrapper[4876]: I1215 09:39:58.706175 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:39:59 crc kubenswrapper[4876]: I1215 09:39:59.673416 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a"} Dec 15 09:40:15 crc kubenswrapper[4876]: I1215 09:40:15.841341 4876 generic.go:334] "Generic (PLEG): 
container finished" podID="b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" containerID="a716d7126f9a4eba5c4722d4493db4cdce683293223e3c7463dd3a581aca7b2c" exitCode=0 Dec 15 09:40:15 crc kubenswrapper[4876]: I1215 09:40:15.841486 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" event={"ID":"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23","Type":"ContainerDied","Data":"a716d7126f9a4eba5c4722d4493db4cdce683293223e3c7463dd3a581aca7b2c"} Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.550613 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.678942 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.679015 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.679168 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.679204 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6h9b\" (UniqueName: \"kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.679238 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.679294 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key\") pod \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\" (UID: \"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23\") " Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.685724 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.685845 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph" (OuterVolumeSpecName: "ceph") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.686023 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b" (OuterVolumeSpecName: "kube-api-access-l6h9b") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "kube-api-access-l6h9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.710835 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.712813 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory" (OuterVolumeSpecName: "inventory") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.719218 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" (UID: "b53427f3-ee4e-41ab-ab2c-bfae9d73dc23"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782046 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782083 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782098 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782126 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6h9b\" (UniqueName: \"kubernetes.io/projected/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-kube-api-access-l6h9b\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782140 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.782151 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b53427f3-ee4e-41ab-ab2c-bfae9d73dc23-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.862528 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" event={"ID":"b53427f3-ee4e-41ab-ab2c-bfae9d73dc23","Type":"ContainerDied","Data":"a1e79a71e9eb6b3bbdb76f1987c93a6bc7ed1891bd424d06eb415d1d7dd0f03f"} Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.862575 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1e79a71e9eb6b3bbdb76f1987c93a6bc7ed1891bd424d06eb415d1d7dd0f03f" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.862578 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-78w4n" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.958074 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr"] Dec 15 09:40:17 crc kubenswrapper[4876]: E1215 09:40:17.958575 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" containerName="neutron-sriov-openstack-openstack-cell1" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.958595 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" containerName="neutron-sriov-openstack-openstack-cell1" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.958782 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="b53427f3-ee4e-41ab-ab2c-bfae9d73dc23" containerName="neutron-sriov-openstack-openstack-cell1" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.959486 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.961341 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.961445 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.962068 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.962330 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.962569 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:40:17 crc kubenswrapper[4876]: I1215 09:40:17.981323 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr"] Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.087503 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.087768 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.087804 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.087841 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.087934 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.088017 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmns7\" 
(UniqueName: \"kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190423 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmns7\" (UniqueName: \"kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190601 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190647 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190694 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190741 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.190813 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.196380 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.197349 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key\") pod 
\"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.197813 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.200169 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.200379 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.216312 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmns7\" (UniqueName: \"kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7\") pod \"neutron-dhcp-openstack-openstack-cell1-tk9mr\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.277263 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.801368 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr"] Dec 15 09:40:18 crc kubenswrapper[4876]: W1215 09:40:18.805699 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69d2e77c_a14c_43ab_ac11_90cd6e7808f5.slice/crio-d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca WatchSource:0}: Error finding container d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca: Status 404 returned error can't find the container with id d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca Dec 15 09:40:18 crc kubenswrapper[4876]: I1215 09:40:18.873258 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" event={"ID":"69d2e77c-a14c-43ab-ac11-90cd6e7808f5","Type":"ContainerStarted","Data":"d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca"} Dec 15 09:40:19 crc kubenswrapper[4876]: I1215 09:40:19.883387 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" event={"ID":"69d2e77c-a14c-43ab-ac11-90cd6e7808f5","Type":"ContainerStarted","Data":"58fcf22fdb47e84c03917eb7157c8d941f0efbb71d8024d619e114febcc3acc0"} Dec 15 09:40:19 crc kubenswrapper[4876]: I1215 09:40:19.904284 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" podStartSLOduration=2.347635354 podStartE2EDuration="2.904266581s" podCreationTimestamp="2025-12-15 09:40:17 +0000 UTC" firstStartedPulling="2025-12-15 09:40:18.808027915 +0000 UTC m=+10144.379170826" lastFinishedPulling="2025-12-15 09:40:19.364659142 +0000 UTC m=+10144.935802053" observedRunningTime="2025-12-15 09:40:19.899216167 +0000 UTC m=+10145.470359078" watchObservedRunningTime="2025-12-15 09:40:19.904266581 +0000 UTC m=+10145.475409492" Dec 15 09:41:09 crc kubenswrapper[4876]: I1215 09:41:09.956791 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:09 crc kubenswrapper[4876]: I1215 09:41:09.959304 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:09 crc kubenswrapper[4876]: I1215 09:41:09.999651 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.039366 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.039750 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmm7l\" (UniqueName: \"kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.040223 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.141799 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmm7l\" (UniqueName: \"kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.142255 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.142445 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.142807 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.143004 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.163040 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dmm7l\" (UniqueName: \"kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l\") pod \"community-operators-zgb74\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.281428 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:10 crc kubenswrapper[4876]: I1215 09:41:10.855355 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:11 crc kubenswrapper[4876]: I1215 09:41:11.419805 4876 generic.go:334] "Generic (PLEG): container finished" podID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerID="01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559" exitCode=0 Dec 15 09:41:11 crc kubenswrapper[4876]: I1215 09:41:11.420031 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerDied","Data":"01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559"} Dec 15 09:41:11 crc kubenswrapper[4876]: I1215 09:41:11.420054 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerStarted","Data":"af5e4a760a418d43bc1c3a46ef6fb8574f019f17d16be806b19cfc7dd6550e6a"} Dec 15 09:41:13 crc kubenswrapper[4876]: I1215 09:41:13.440533 4876 generic.go:334] "Generic (PLEG): container finished" podID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerID="ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275" exitCode=0 Dec 15 09:41:13 crc kubenswrapper[4876]: I1215 09:41:13.440627 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerDied","Data":"ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275"} Dec 15 09:41:14 crc kubenswrapper[4876]: I1215 09:41:14.453274 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerStarted","Data":"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20"} Dec 15 09:41:14 crc kubenswrapper[4876]: I1215 09:41:14.476616 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zgb74" podStartSLOduration=2.925126438 podStartE2EDuration="5.476591877s" podCreationTimestamp="2025-12-15 09:41:09 +0000 UTC" firstStartedPulling="2025-12-15 09:41:11.424424571 +0000 UTC m=+10196.995567482" lastFinishedPulling="2025-12-15 09:41:13.97589001 +0000 UTC m=+10199.547032921" observedRunningTime="2025-12-15 09:41:14.469541378 +0000 UTC m=+10200.040684289" watchObservedRunningTime="2025-12-15 09:41:14.476591877 +0000 UTC m=+10200.047734788" Dec 15 09:41:18 crc kubenswrapper[4876]: I1215 09:41:18.496053 4876 generic.go:334] "Generic (PLEG): container finished" podID="69d2e77c-a14c-43ab-ac11-90cd6e7808f5" containerID="58fcf22fdb47e84c03917eb7157c8d941f0efbb71d8024d619e114febcc3acc0" exitCode=0 Dec 15 09:41:18 crc kubenswrapper[4876]: I1215 09:41:18.496158 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" 
event={"ID":"69d2e77c-a14c-43ab-ac11-90cd6e7808f5","Type":"ContainerDied","Data":"58fcf22fdb47e84c03917eb7157c8d941f0efbb71d8024d619e114febcc3acc0"} Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.042822 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.087814 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.087957 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.088026 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.088125 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmns7\" (UniqueName: \"kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.088467 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.088532 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle\") pod \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\" (UID: \"69d2e77c-a14c-43ab-ac11-90cd6e7808f5\") " Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.095280 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph" (OuterVolumeSpecName: "ceph") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.096469 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7" (OuterVolumeSpecName: "kube-api-access-tmns7") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "kube-api-access-tmns7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.097476 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.123266 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory" (OuterVolumeSpecName: "inventory") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.134345 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.135198 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "69d2e77c-a14c-43ab-ac11-90cd6e7808f5" (UID: "69d2e77c-a14c-43ab-ac11-90cd6e7808f5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191489 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191533 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmns7\" (UniqueName: \"kubernetes.io/projected/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-kube-api-access-tmns7\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191546 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191558 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191570 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.191583 4876 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69d2e77c-a14c-43ab-ac11-90cd6e7808f5-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.282253 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.282312 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.329516 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.515376 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.515375 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-tk9mr" event={"ID":"69d2e77c-a14c-43ab-ac11-90cd6e7808f5","Type":"ContainerDied","Data":"d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca"} Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.515419 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5a85bba77fdc9cd9ff28cbfb6ac034924199336805c4959b14d45fc2a670bca" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.573995 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:20 crc kubenswrapper[4876]: I1215 09:41:20.624527 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:22 crc kubenswrapper[4876]: I1215 09:41:22.534846 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zgb74" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="registry-server" containerID="cri-o://6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20" gracePeriod=2 Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.040719 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.159566 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities\") pod \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.159811 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmm7l\" (UniqueName: \"kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l\") pod \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.159872 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content\") pod \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\" (UID: \"f6ebc0b7-5104-477a-80f2-e0540e5e0b31\") " Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.161456 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities" (OuterVolumeSpecName: "utilities") pod "f6ebc0b7-5104-477a-80f2-e0540e5e0b31" (UID: "f6ebc0b7-5104-477a-80f2-e0540e5e0b31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.176401 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l" (OuterVolumeSpecName: "kube-api-access-dmm7l") pod "f6ebc0b7-5104-477a-80f2-e0540e5e0b31" (UID: "f6ebc0b7-5104-477a-80f2-e0540e5e0b31"). InnerVolumeSpecName "kube-api-access-dmm7l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.217174 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f6ebc0b7-5104-477a-80f2-e0540e5e0b31" (UID: "f6ebc0b7-5104-477a-80f2-e0540e5e0b31"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.262735 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.262980 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmm7l\" (UniqueName: \"kubernetes.io/projected/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-kube-api-access-dmm7l\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.263042 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f6ebc0b7-5104-477a-80f2-e0540e5e0b31-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.546230 4876 generic.go:334] "Generic (PLEG): container finished" podID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerID="6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20" exitCode=0 Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.546276 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerDied","Data":"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20"} Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.546290 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zgb74" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.546306 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zgb74" event={"ID":"f6ebc0b7-5104-477a-80f2-e0540e5e0b31","Type":"ContainerDied","Data":"af5e4a760a418d43bc1c3a46ef6fb8574f019f17d16be806b19cfc7dd6550e6a"} Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.546327 4876 scope.go:117] "RemoveContainer" containerID="6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.571500 4876 scope.go:117] "RemoveContainer" containerID="ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.597840 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.610532 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zgb74"] Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.620769 4876 scope.go:117] "RemoveContainer" containerID="01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.652131 4876 scope.go:117] "RemoveContainer" containerID="6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20" Dec 15 09:41:23 crc kubenswrapper[4876]: E1215 09:41:23.652788 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20\": container with ID starting with 6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20 not found: ID does not exist" containerID="6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.652883 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20"} err="failed to get container status \"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20\": rpc error: code = NotFound desc = could not find container \"6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20\": container with ID starting with 6b7623e0f81e20720dc6fad6da803b4534388907415df9bf2a1f9ec91d706f20 not found: ID does not exist" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.652927 4876 scope.go:117] "RemoveContainer" containerID="ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275" Dec 15 09:41:23 crc kubenswrapper[4876]: E1215 09:41:23.655617 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275\": container with ID starting with ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275 not found: ID does not exist" containerID="ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.655686 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275"} err="failed to get container status \"ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275\": rpc error: code = NotFound desc = could not find 
container \"ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275\": container with ID starting with ee45bd1b0e698f6eb237564f426b242503424c082b7546436c58dae1f25fa275 not found: ID does not exist" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.655726 4876 scope.go:117] "RemoveContainer" containerID="01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559" Dec 15 09:41:23 crc kubenswrapper[4876]: E1215 09:41:23.656230 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559\": container with ID starting with 01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559 not found: ID does not exist" containerID="01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559" Dec 15 09:41:23 crc kubenswrapper[4876]: I1215 09:41:23.656262 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559"} err="failed to get container status \"01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559\": rpc error: code = NotFound desc = could not find container \"01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559\": container with ID starting with 01a54983735d1dd3820ea5ffaad62be7b3a8cb4540c69b90890b00e6aee1a559 not found: ID does not exist" Dec 15 09:41:24 crc kubenswrapper[4876]: I1215 09:41:24.731121 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" path="/var/lib/kubelet/pods/f6ebc0b7-5104-477a-80f2-e0540e5e0b31/volumes" Dec 15 09:41:39 crc kubenswrapper[4876]: I1215 09:41:39.387584 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:39 crc kubenswrapper[4876]: I1215 09:41:39.388348 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" containerName="nova-cell0-conductor-conductor" containerID="cri-o://7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" gracePeriod=30 Dec 15 09:41:39 crc kubenswrapper[4876]: I1215 09:41:39.417505 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:39 crc kubenswrapper[4876]: I1215 09:41:39.417716 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="a164b6d5-b873-42ee-8633-a41c81282469" containerName="nova-cell1-conductor-conductor" containerID="cri-o://adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" gracePeriod=30 Dec 15 09:41:39 crc kubenswrapper[4876]: E1215 09:41:39.502875 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:39 crc kubenswrapper[4876]: E1215 09:41:39.504493 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:39 
crc kubenswrapper[4876]: E1215 09:41:39.506221 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:39 crc kubenswrapper[4876]: E1215 09:41:39.506318 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="a164b6d5-b873-42ee-8633-a41c81282469" containerName="nova-cell1-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.101024 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 is running failed: container process not found" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.101904 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 is running failed: container process not found" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.102310 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 is running failed: container process not found" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.102358 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" containerName="nova-cell0-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.148367 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.148576 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-log" containerID="cri-o://d581a9420fa07ef24e6a91716a26f4de8071f8016669a486c9af1f671ada4f63" gracePeriod=30 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.149083 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-api" containerID="cri-o://91f38ad56e3b97487d5f76a93265af62320fc1976a4257a99903d0edf1fb863f" gracePeriod=30 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.201611 4876 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.201814 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" containerName="nova-scheduler-scheduler" containerID="cri-o://a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" gracePeriod=30 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.258304 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.258537 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" containerID="cri-o://914958b5838f9e28f2d70ee55eb77773903a4b51d0ca09f26aa803e8b5be6890" gracePeriod=30 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.266265 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" containerID="cri-o://ba2ac4ece6d57c8b8ac1cc0728c8febb0724b156c6c42a505702645d7f881fab" gracePeriod=30 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.482322 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.515698 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle\") pod \"9bdbec5e-3396-41be-988d-f7883fca149a\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.515821 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6dh6\" (UniqueName: \"kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6\") pod \"9bdbec5e-3396-41be-988d-f7883fca149a\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.523943 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6" (OuterVolumeSpecName: "kube-api-access-s6dh6") pod "9bdbec5e-3396-41be-988d-f7883fca149a" (UID: "9bdbec5e-3396-41be-988d-f7883fca149a"). InnerVolumeSpecName "kube-api-access-s6dh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.549909 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.562333 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bdbec5e-3396-41be-988d-f7883fca149a" (UID: "9bdbec5e-3396-41be-988d-f7883fca149a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.617524 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle\") pod \"a164b6d5-b873-42ee-8633-a41c81282469\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.617753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data\") pod \"a164b6d5-b873-42ee-8633-a41c81282469\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.617977 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhxnd\" (UniqueName: \"kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd\") pod \"a164b6d5-b873-42ee-8633-a41c81282469\" (UID: \"a164b6d5-b873-42ee-8633-a41c81282469\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.618150 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data\") pod \"9bdbec5e-3396-41be-988d-f7883fca149a\" (UID: \"9bdbec5e-3396-41be-988d-f7883fca149a\") " Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.618626 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.618751 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6dh6\" (UniqueName: \"kubernetes.io/projected/9bdbec5e-3396-41be-988d-f7883fca149a-kube-api-access-s6dh6\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.621444 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd" (OuterVolumeSpecName: "kube-api-access-hhxnd") pod "a164b6d5-b873-42ee-8633-a41c81282469" (UID: "a164b6d5-b873-42ee-8633-a41c81282469"). InnerVolumeSpecName "kube-api-access-hhxnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.644189 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data" (OuterVolumeSpecName: "config-data") pod "a164b6d5-b873-42ee-8633-a41c81282469" (UID: "a164b6d5-b873-42ee-8633-a41c81282469"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.644733 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a164b6d5-b873-42ee-8633-a41c81282469" (UID: "a164b6d5-b873-42ee-8633-a41c81282469"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.657487 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data" (OuterVolumeSpecName: "config-data") pod "9bdbec5e-3396-41be-988d-f7883fca149a" (UID: "9bdbec5e-3396-41be-988d-f7883fca149a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.702046 4876 generic.go:334] "Generic (PLEG): container finished" podID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerID="d581a9420fa07ef24e6a91716a26f4de8071f8016669a486c9af1f671ada4f63" exitCode=143 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.702170 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerDied","Data":"d581a9420fa07ef24e6a91716a26f4de8071f8016669a486c9af1f671ada4f63"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.704401 4876 generic.go:334] "Generic (PLEG): container finished" podID="a164b6d5-b873-42ee-8633-a41c81282469" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" exitCode=0 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.704479 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a164b6d5-b873-42ee-8633-a41c81282469","Type":"ContainerDied","Data":"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.704527 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a164b6d5-b873-42ee-8633-a41c81282469","Type":"ContainerDied","Data":"0d3979f7540a0f1ea15b4dd30e20ab4765fff4483e43459992f8288a9fb95514"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.704549 4876 scope.go:117] "RemoveContainer" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.704765 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.712229 4876 generic.go:334] "Generic (PLEG): container finished" podID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerID="914958b5838f9e28f2d70ee55eb77773903a4b51d0ca09f26aa803e8b5be6890" exitCode=143 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.717443 4876 generic.go:334] "Generic (PLEG): container finished" podID="9bdbec5e-3396-41be-988d-f7883fca149a" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" exitCode=0 Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.717529 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.723550 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhxnd\" (UniqueName: \"kubernetes.io/projected/a164b6d5-b873-42ee-8633-a41c81282469-kube-api-access-hhxnd\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.723734 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerDied","Data":"914958b5838f9e28f2d70ee55eb77773903a4b51d0ca09f26aa803e8b5be6890"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.723766 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9bdbec5e-3396-41be-988d-f7883fca149a","Type":"ContainerDied","Data":"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.723782 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9bdbec5e-3396-41be-988d-f7883fca149a","Type":"ContainerDied","Data":"bf4d927a8381dba0d7f38c473e3054be033e6efda8818be69a57bf2b651448fc"} Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.724258 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9bdbec5e-3396-41be-988d-f7883fca149a-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.724280 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.724291 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a164b6d5-b873-42ee-8633-a41c81282469-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.731326 4876 scope.go:117] "RemoveContainer" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.731688 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0\": container with ID starting with adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0 not found: ID does not exist" containerID="adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.731735 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0"} err="failed to get container status \"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0\": rpc error: code = NotFound desc = could not find container \"adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0\": container with ID starting with adfe50be01a24b623e9687db9e0c09fb13b07b4e67f0bbb11e97f7f56073c9f0 not found: ID does not exist" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.731763 4876 scope.go:117] "RemoveContainer" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.750366 4876 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.763321 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.770951 4876 scope.go:117] "RemoveContainer" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.772494 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013\": container with ID starting with 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 not found: ID does not exist" containerID="7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.772547 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013"} err="failed to get container status \"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013\": rpc error: code = NotFound desc = could not find container \"7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013\": container with ID starting with 7ab15d4d5bcb57ad7242512d8be772cf3f6ca49bc4f609e51f46908c1d80d013 not found: ID does not exist" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.780485 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.795310 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809279 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.809876 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="extract-utilities" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809898 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="extract-utilities" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.809909 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="registry-server" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809920 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="registry-server" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.809934 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" containerName="nova-cell0-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809942 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" containerName="nova-cell0-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.809955 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="extract-content" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809963 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" 
containerName="extract-content" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.809981 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a164b6d5-b873-42ee-8633-a41c81282469" containerName="nova-cell1-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.809988 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a164b6d5-b873-42ee-8633-a41c81282469" containerName="nova-cell1-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: E1215 09:41:40.810005 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d2e77c-a14c-43ab-ac11-90cd6e7808f5" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.810012 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d2e77c-a14c-43ab-ac11-90cd6e7808f5" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.810265 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a164b6d5-b873-42ee-8633-a41c81282469" containerName="nova-cell1-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.810294 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" containerName="nova-cell0-conductor-conductor" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.810310 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d2e77c-a14c-43ab-ac11-90cd6e7808f5" containerName="neutron-dhcp-openstack-openstack-cell1" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.810325 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6ebc0b7-5104-477a-80f2-e0540e5e0b31" containerName="registry-server" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.811198 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.815931 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.819173 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.829067 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-297rh\" (UniqueName: \"kubernetes.io/projected/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-kube-api-access-297rh\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.829159 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.829335 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.829462 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.831095 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.835069 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.846414 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.931464 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.932440 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-297rh\" (UniqueName: \"kubernetes.io/projected/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-kube-api-access-297rh\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.932570 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.932646 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqhqx\" (UniqueName: \"kubernetes.io/projected/51c89021-f333-443b-bbaa-d0c236ce716b-kube-api-access-tqhqx\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.932884 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.932946 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.936291 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.936442 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:40 crc kubenswrapper[4876]: I1215 09:41:40.948430 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-297rh\" (UniqueName: \"kubernetes.io/projected/ce9bf6a7-85ad-46ef-bf55-0e48140b4abf-kube-api-access-297rh\") pod \"nova-cell1-conductor-0\" (UID: \"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf\") " pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.035069 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.035155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.035327 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqhqx\" (UniqueName: \"kubernetes.io/projected/51c89021-f333-443b-bbaa-d0c236ce716b-kube-api-access-tqhqx\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.038527 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.038860 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c89021-f333-443b-bbaa-d0c236ce716b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.051826 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqhqx\" (UniqueName: \"kubernetes.io/projected/51c89021-f333-443b-bbaa-d0c236ce716b-kube-api-access-tqhqx\") pod \"nova-cell0-conductor-0\" (UID: \"51c89021-f333-443b-bbaa-d0c236ce716b\") " pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.144841 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.160599 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:41 crc kubenswrapper[4876]: E1215 09:41:41.416302 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 09:41:41 crc kubenswrapper[4876]: E1215 09:41:41.418602 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 09:41:41 crc kubenswrapper[4876]: E1215 09:41:41.421559 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 15 09:41:41 crc kubenswrapper[4876]: E1215 09:41:41.421642 4876 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" containerName="nova-scheduler-scheduler" Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.633039 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.737493 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf","Type":"ContainerStarted","Data":"c29f3c558bea0fc393b1d3d226c264ac05db72063d0e08c387fd389a0ab11f43"} Dec 15 09:41:41 crc kubenswrapper[4876]: W1215 09:41:41.745507 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51c89021_f333_443b_bbaa_d0c236ce716b.slice/crio-176804445f1b2935dd7bb020e7f201582a00af503bde6f35ec66bd16c49bce92 WatchSource:0}: Error finding container 176804445f1b2935dd7bb020e7f201582a00af503bde6f35ec66bd16c49bce92: Status 404 returned error can't find the container with id 176804445f1b2935dd7bb020e7f201582a00af503bde6f35ec66bd16c49bce92 Dec 15 09:41:41 crc kubenswrapper[4876]: I1215 09:41:41.746632 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.768097 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bdbec5e-3396-41be-988d-f7883fca149a" path="/var/lib/kubelet/pods/9bdbec5e-3396-41be-988d-f7883fca149a/volumes" Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.769621 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a164b6d5-b873-42ee-8633-a41c81282469" path="/var/lib/kubelet/pods/a164b6d5-b873-42ee-8633-a41c81282469/volumes" Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.863188 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" 
event={"ID":"ce9bf6a7-85ad-46ef-bf55-0e48140b4abf","Type":"ContainerStarted","Data":"b2bb01416a6659a65a3a1a5d91b13f79858b457209bfc2db33fb637862b42f9b"} Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.865421 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.944594 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.944577026 podStartE2EDuration="2.944577026s" podCreationTimestamp="2025-12-15 09:41:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:41:42.942387377 +0000 UTC m=+10228.513530288" watchObservedRunningTime="2025-12-15 09:41:42.944577026 +0000 UTC m=+10228.515719937" Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.959144 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"51c89021-f333-443b-bbaa-d0c236ce716b","Type":"ContainerStarted","Data":"06fb93e84ef5e4f437447a02cb19fec10e4bba806edf3a93055eb850adb2c129"} Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.959197 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"51c89021-f333-443b-bbaa-d0c236ce716b","Type":"ContainerStarted","Data":"176804445f1b2935dd7bb020e7f201582a00af503bde6f35ec66bd16c49bce92"} Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.959790 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:42 crc kubenswrapper[4876]: I1215 09:41:42.999906 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.999890509 podStartE2EDuration="2.999890509s" podCreationTimestamp="2025-12-15 09:41:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:41:42.997202396 +0000 UTC m=+10228.568345327" watchObservedRunningTime="2025-12-15 09:41:42.999890509 +0000 UTC m=+10228.571033420" Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.439187 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:39856->10.217.1.94:8775: read: connection reset by peer" Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.439195 4876 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.94:8775/\": read tcp 10.217.0.2:39854->10.217.1.94:8775: read: connection reset by peer" Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.981496 4876 generic.go:334] "Generic (PLEG): container finished" podID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerID="91f38ad56e3b97487d5f76a93265af62320fc1976a4257a99903d0edf1fb863f" exitCode=0 Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.981592 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerDied","Data":"91f38ad56e3b97487d5f76a93265af62320fc1976a4257a99903d0edf1fb863f"} Dec 15 
09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.984676 4876 generic.go:334] "Generic (PLEG): container finished" podID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerID="ba2ac4ece6d57c8b8ac1cc0728c8febb0724b156c6c42a505702645d7f881fab" exitCode=0 Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.984841 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerDied","Data":"ba2ac4ece6d57c8b8ac1cc0728c8febb0724b156c6c42a505702645d7f881fab"} Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.984928 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fc51cb24-9e65-4489-80be-860db3e6b3d9","Type":"ContainerDied","Data":"1f5a36f1e9ea79bfc4444e9c8cac6cc4c197558e8ba05ee3ba178cbb1d5cc635"} Dec 15 09:41:43 crc kubenswrapper[4876]: I1215 09:41:43.984941 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f5a36f1e9ea79bfc4444e9c8cac6cc4c197558e8ba05ee3ba178cbb1d5cc635" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.050497 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.055162 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167058 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data\") pod \"fc51cb24-9e65-4489-80be-860db3e6b3d9\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167264 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle\") pod \"fc51cb24-9e65-4489-80be-860db3e6b3d9\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167315 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4r66\" (UniqueName: \"kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66\") pod \"072d0610-d4dc-4091-bed1-d145dae1a1d8\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167337 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs\") pod \"072d0610-d4dc-4091-bed1-d145dae1a1d8\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167361 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data\") pod \"072d0610-d4dc-4091-bed1-d145dae1a1d8\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167416 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle\") pod \"072d0610-d4dc-4091-bed1-d145dae1a1d8\" (UID: \"072d0610-d4dc-4091-bed1-d145dae1a1d8\") " Dec 15 09:41:44 crc 
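
Illustrative sketch, not part of the captured log: every kubenswrapper entry in this capture shares one shape, a syslog prefix (timestamp, host, kubenswrapper[pid]) followed by a klog header (severity letter, MMDD, wall-clock time, PID, source file:line) and a quoted message with key="value" fields. A rough Go regexp for splitting those pieces apart when grepping a capture like this one; the pattern and field names are my own, not part of any kubelet tooling:

package main

import (
	"fmt"
	"regexp"
)

// Matches lines of the form:
//   Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167507 4876 reconciler_common.go:159] "..."
// Escaped quotes inside the quoted message are tolerated.
var kubeletLine = regexp.MustCompile(
	`^(\w{3} \d+ [\d:]+) (\S+) kubenswrapper\[(\d+)\]: ([IWEF])(\d{4}) ([\d:.]+)\s+\d+ (\S+)\] "((?:[^"\\]|\\.)*)"`)

func main() {
	sample := `Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\""`
	m := kubeletLine.FindStringSubmatch(sample)
	if m == nil {
		fmt.Println("no match")
		return
	}
	// m[1]=syslog time, m[2]=host, m[3]=syslog pid, m[4]=severity, m[5]=MMDD,
	// m[6]=klog time, m[7]=source file:line, m[8]=quoted message.
	fmt.Println("host:", m[2], "severity:", m[4], "source:", m[7])
	fmt.Println("msg:", m[8])
}
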
kubenswrapper[4876]: I1215 09:41:44.167507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs\") pod \"fc51cb24-9e65-4489-80be-860db3e6b3d9\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.167550 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r4sp\" (UniqueName: \"kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp\") pod \"fc51cb24-9e65-4489-80be-860db3e6b3d9\" (UID: \"fc51cb24-9e65-4489-80be-860db3e6b3d9\") " Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.171611 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs" (OuterVolumeSpecName: "logs") pod "fc51cb24-9e65-4489-80be-860db3e6b3d9" (UID: "fc51cb24-9e65-4489-80be-860db3e6b3d9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.175337 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs" (OuterVolumeSpecName: "logs") pod "072d0610-d4dc-4091-bed1-d145dae1a1d8" (UID: "072d0610-d4dc-4091-bed1-d145dae1a1d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.176299 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66" (OuterVolumeSpecName: "kube-api-access-n4r66") pod "072d0610-d4dc-4091-bed1-d145dae1a1d8" (UID: "072d0610-d4dc-4091-bed1-d145dae1a1d8"). InnerVolumeSpecName "kube-api-access-n4r66". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.179328 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp" (OuterVolumeSpecName: "kube-api-access-4r4sp") pod "fc51cb24-9e65-4489-80be-860db3e6b3d9" (UID: "fc51cb24-9e65-4489-80be-860db3e6b3d9"). InnerVolumeSpecName "kube-api-access-4r4sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.209871 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc51cb24-9e65-4489-80be-860db3e6b3d9" (UID: "fc51cb24-9e65-4489-80be-860db3e6b3d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.219577 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "072d0610-d4dc-4091-bed1-d145dae1a1d8" (UID: "072d0610-d4dc-4091-bed1-d145dae1a1d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.222477 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data" (OuterVolumeSpecName: "config-data") pod "fc51cb24-9e65-4489-80be-860db3e6b3d9" (UID: "fc51cb24-9e65-4489-80be-860db3e6b3d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.231244 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data" (OuterVolumeSpecName: "config-data") pod "072d0610-d4dc-4091-bed1-d145dae1a1d8" (UID: "072d0610-d4dc-4091-bed1-d145dae1a1d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269899 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc51cb24-9e65-4489-80be-860db3e6b3d9-logs\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269937 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r4sp\" (UniqueName: \"kubernetes.io/projected/fc51cb24-9e65-4489-80be-860db3e6b3d9-kube-api-access-4r4sp\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269948 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269957 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc51cb24-9e65-4489-80be-860db3e6b3d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269966 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4r66\" (UniqueName: \"kubernetes.io/projected/072d0610-d4dc-4091-bed1-d145dae1a1d8-kube-api-access-n4r66\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269974 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269984 4876 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/072d0610-d4dc-4091-bed1-d145dae1a1d8-logs\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.269993 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/072d0610-d4dc-4091-bed1-d145dae1a1d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.995768 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.996915 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.997488 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"072d0610-d4dc-4091-bed1-d145dae1a1d8","Type":"ContainerDied","Data":"ad855c6de82bfc8717ad13eb9be5fcd8d022417a49541afbbc8b4445658bd755"} Dec 15 09:41:44 crc kubenswrapper[4876]: I1215 09:41:44.997533 4876 scope.go:117] "RemoveContainer" containerID="91f38ad56e3b97487d5f76a93265af62320fc1976a4257a99903d0edf1fb863f" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.023776 4876 scope.go:117] "RemoveContainer" containerID="d581a9420fa07ef24e6a91716a26f4de8071f8016669a486c9af1f671ada4f63" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.034396 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.051242 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.072751 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.087296 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.110762 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: E1215 09:41:45.111460 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-api" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111495 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-api" Dec 15 09:41:45 crc kubenswrapper[4876]: E1215 09:41:45.111521 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111530 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" Dec 15 09:41:45 crc kubenswrapper[4876]: E1215 09:41:45.111556 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-log" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111566 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-log" Dec 15 09:41:45 crc kubenswrapper[4876]: E1215 09:41:45.111587 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111595 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111834 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-log" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111869 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" containerName="nova-api-api" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111890 4876 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-metadata" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.111901 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" containerName="nova-metadata-log" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.113357 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.118021 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.120539 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.134612 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.136542 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.139196 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.148250 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194393 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194456 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-config-data\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194521 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vczlf\" (UniqueName: \"kubernetes.io/projected/e0e287f5-1611-44e0-bc10-8ca467a89dbe-kube-api-access-vczlf\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194569 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194685 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rswf5\" (UniqueName: \"kubernetes.io/projected/88581f4e-4f4d-42e8-a59f-5bd7385d9485-kube-api-access-rswf5\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.194705 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/88581f4e-4f4d-42e8-a59f-5bd7385d9485-logs\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.195591 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e287f5-1611-44e0-bc10-8ca467a89dbe-logs\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.195684 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-config-data\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297666 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-config-data\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297751 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vczlf\" (UniqueName: \"kubernetes.io/projected/e0e287f5-1611-44e0-bc10-8ca467a89dbe-kube-api-access-vczlf\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297815 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297914 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rswf5\" (UniqueName: \"kubernetes.io/projected/88581f4e-4f4d-42e8-a59f-5bd7385d9485-kube-api-access-rswf5\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297941 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88581f4e-4f4d-42e8-a59f-5bd7385d9485-logs\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.297964 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e287f5-1611-44e0-bc10-8ca467a89dbe-logs\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.298014 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-config-data\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.298053 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.298556 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0e287f5-1611-44e0-bc10-8ca467a89dbe-logs\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.298853 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88581f4e-4f4d-42e8-a59f-5bd7385d9485-logs\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.303664 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-config-data\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.304922 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.305906 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e287f5-1611-44e0-bc10-8ca467a89dbe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.310477 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88581f4e-4f4d-42e8-a59f-5bd7385d9485-config-data\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.318271 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rswf5\" (UniqueName: \"kubernetes.io/projected/88581f4e-4f4d-42e8-a59f-5bd7385d9485-kube-api-access-rswf5\") pod \"nova-api-0\" (UID: \"88581f4e-4f4d-42e8-a59f-5bd7385d9485\") " pod="openstack/nova-api-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.324121 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vczlf\" (UniqueName: \"kubernetes.io/projected/e0e287f5-1611-44e0-bc10-8ca467a89dbe-kube-api-access-vczlf\") pod \"nova-metadata-0\" (UID: \"e0e287f5-1611-44e0-bc10-8ca467a89dbe\") " pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.442896 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 15 09:41:45 crc kubenswrapper[4876]: I1215 09:41:45.463498 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.012084 4876 generic.go:334] "Generic (PLEG): container finished" podID="356d173e-c3de-410a-938a-0b96ae635b8c" containerID="a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" exitCode=0 Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.012697 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"356d173e-c3de-410a-938a-0b96ae635b8c","Type":"ContainerDied","Data":"a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a"} Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.064307 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 15 09:41:46 crc kubenswrapper[4876]: W1215 09:41:46.071184 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0e287f5_1611_44e0_bc10_8ca467a89dbe.slice/crio-01139a302caafa057ec0361c87c8b8403c52b50409f713f755d24639e375b526 WatchSource:0}: Error finding container 01139a302caafa057ec0361c87c8b8403c52b50409f713f755d24639e375b526: Status 404 returned error can't find the container with id 01139a302caafa057ec0361c87c8b8403c52b50409f713f755d24639e375b526 Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.079097 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.177420 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.220963 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.299435 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.321469 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data\") pod \"356d173e-c3de-410a-938a-0b96ae635b8c\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.321518 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2wt6\" (UniqueName: \"kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6\") pod \"356d173e-c3de-410a-938a-0b96ae635b8c\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.321767 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle\") pod \"356d173e-c3de-410a-938a-0b96ae635b8c\" (UID: \"356d173e-c3de-410a-938a-0b96ae635b8c\") " Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.349348 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6" (OuterVolumeSpecName: "kube-api-access-h2wt6") pod "356d173e-c3de-410a-938a-0b96ae635b8c" (UID: "356d173e-c3de-410a-938a-0b96ae635b8c"). InnerVolumeSpecName "kube-api-access-h2wt6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.389002 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data" (OuterVolumeSpecName: "config-data") pod "356d173e-c3de-410a-938a-0b96ae635b8c" (UID: "356d173e-c3de-410a-938a-0b96ae635b8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.395980 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "356d173e-c3de-410a-938a-0b96ae635b8c" (UID: "356d173e-c3de-410a-938a-0b96ae635b8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.423079 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.423127 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/356d173e-c3de-410a-938a-0b96ae635b8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.423136 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2wt6\" (UniqueName: \"kubernetes.io/projected/356d173e-c3de-410a-938a-0b96ae635b8c-kube-api-access-h2wt6\") on node \"crc\" DevicePath \"\"" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.718693 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="072d0610-d4dc-4091-bed1-d145dae1a1d8" path="/var/lib/kubelet/pods/072d0610-d4dc-4091-bed1-d145dae1a1d8/volumes" Dec 15 09:41:46 crc kubenswrapper[4876]: I1215 09:41:46.719753 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc51cb24-9e65-4489-80be-860db3e6b3d9" path="/var/lib/kubelet/pods/fc51cb24-9e65-4489-80be-860db3e6b3d9/volumes" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.028171 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"356d173e-c3de-410a-938a-0b96ae635b8c","Type":"ContainerDied","Data":"fe49e22a065fbb232269e73254dd3654f75e781256f14db95c9a8679d48e48dc"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.029171 4876 scope.go:117] "RemoveContainer" containerID="a534b95b1d16f226db2fca1e598ee3d819604ead8e1c389e1d0e8db94053e21a" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.029306 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.032968 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"88581f4e-4f4d-42e8-a59f-5bd7385d9485","Type":"ContainerStarted","Data":"027e1bf3c08a01ca53ebde26a523a7d075b96c62d8644b948890f7120954c510"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.033014 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"88581f4e-4f4d-42e8-a59f-5bd7385d9485","Type":"ContainerStarted","Data":"a9e73fc2638eaf4d6a2cf73bb5d3d115b0c1e930ef91c418460f4fc4c87b0fb7"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.033028 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"88581f4e-4f4d-42e8-a59f-5bd7385d9485","Type":"ContainerStarted","Data":"e6804648b0d4b8da830775614dcde462ca4998ac738acbac25c9d9c64fd4e684"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.037492 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0e287f5-1611-44e0-bc10-8ca467a89dbe","Type":"ContainerStarted","Data":"68f077e2913855bbfc0a65ea6e192bb52f2e8a78d3c1c605f188d05bc764aec3"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.037530 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0e287f5-1611-44e0-bc10-8ca467a89dbe","Type":"ContainerStarted","Data":"02872ef0f3e14d0339aa7354e9fb15ff00a3da4c110c3cbd18867fdcdf1d7b6e"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.037541 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e0e287f5-1611-44e0-bc10-8ca467a89dbe","Type":"ContainerStarted","Data":"01139a302caafa057ec0361c87c8b8403c52b50409f713f755d24639e375b526"} Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.057400 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.081469 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.094039 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:47 crc kubenswrapper[4876]: E1215 09:41:47.094557 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" containerName="nova-scheduler-scheduler" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.094578 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" containerName="nova-scheduler-scheduler" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.094777 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" containerName="nova-scheduler-scheduler" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.095610 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.101069 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.125520 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.137269 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.137330 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmt5t\" (UniqueName: \"kubernetes.io/projected/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-kube-api-access-jmt5t\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.137380 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-config-data\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.137854 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.137830251 podStartE2EDuration="2.137830251s" podCreationTimestamp="2025-12-15 09:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:41:47.078217652 +0000 UTC m=+10232.649360583" watchObservedRunningTime="2025-12-15 09:41:47.137830251 +0000 UTC m=+10232.708973162" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.148083 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.148063835 podStartE2EDuration="2.148063835s" podCreationTimestamp="2025-12-15 09:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:41:47.096456131 +0000 UTC m=+10232.667599072" watchObservedRunningTime="2025-12-15 09:41:47.148063835 +0000 UTC m=+10232.719206746" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.243423 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-config-data\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.244254 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.244432 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmt5t\" 
(UniqueName: \"kubernetes.io/projected/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-kube-api-access-jmt5t\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.249007 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-config-data\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.249749 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.272776 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmt5t\" (UniqueName: \"kubernetes.io/projected/21066d50-c54c-4d8d-8b53-84c8e8abbeb0-kube-api-access-jmt5t\") pod \"nova-scheduler-0\" (UID: \"21066d50-c54c-4d8d-8b53-84c8e8abbeb0\") " pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.438588 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 15 09:41:47 crc kubenswrapper[4876]: I1215 09:41:47.972584 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 15 09:41:47 crc kubenswrapper[4876]: W1215 09:41:47.973364 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21066d50_c54c_4d8d_8b53_84c8e8abbeb0.slice/crio-bea5ed2ad3a65a10e10225b2b86b7339c1a562d5efea3983c8157620c3b33e2a WatchSource:0}: Error finding container bea5ed2ad3a65a10e10225b2b86b7339c1a562d5efea3983c8157620c3b33e2a: Status 404 returned error can't find the container with id bea5ed2ad3a65a10e10225b2b86b7339c1a562d5efea3983c8157620c3b33e2a Dec 15 09:41:48 crc kubenswrapper[4876]: I1215 09:41:48.052731 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"21066d50-c54c-4d8d-8b53-84c8e8abbeb0","Type":"ContainerStarted","Data":"bea5ed2ad3a65a10e10225b2b86b7339c1a562d5efea3983c8157620c3b33e2a"} Dec 15 09:41:48 crc kubenswrapper[4876]: I1215 09:41:48.717843 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="356d173e-c3de-410a-938a-0b96ae635b8c" path="/var/lib/kubelet/pods/356d173e-c3de-410a-938a-0b96ae635b8c/volumes" Dec 15 09:41:49 crc kubenswrapper[4876]: I1215 09:41:49.074205 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"21066d50-c54c-4d8d-8b53-84c8e8abbeb0","Type":"ContainerStarted","Data":"51ce29726bf9fbb27170e3f70435f3e6e954b77f63de3b1e993f66806617dc28"} Dec 15 09:41:49 crc kubenswrapper[4876]: I1215 09:41:49.101648 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.101625901 podStartE2EDuration="2.101625901s" podCreationTimestamp="2025-12-15 09:41:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 09:41:49.09487489 +0000 UTC m=+10234.666017821" watchObservedRunningTime="2025-12-15 09:41:49.101625901 +0000 UTC 
m=+10234.672768812" Dec 15 09:41:50 crc kubenswrapper[4876]: I1215 09:41:50.443938 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 09:41:50 crc kubenswrapper[4876]: I1215 09:41:50.445494 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 15 09:41:52 crc kubenswrapper[4876]: I1215 09:41:52.439206 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 15 09:41:55 crc kubenswrapper[4876]: I1215 09:41:55.444381 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 09:41:55 crc kubenswrapper[4876]: I1215 09:41:55.444448 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 15 09:41:55 crc kubenswrapper[4876]: I1215 09:41:55.466552 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 09:41:55 crc kubenswrapper[4876]: I1215 09:41:55.466609 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 15 09:41:56 crc kubenswrapper[4876]: I1215 09:41:56.610395 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="88581f4e-4f4d-42e8-a59f-5bd7385d9485" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 09:41:56 crc kubenswrapper[4876]: I1215 09:41:56.610448 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="88581f4e-4f4d-42e8-a59f-5bd7385d9485" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.200:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 09:41:56 crc kubenswrapper[4876]: I1215 09:41:56.611645 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e0e287f5-1611-44e0-bc10-8ca467a89dbe" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.199:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 09:41:56 crc kubenswrapper[4876]: I1215 09:41:56.611909 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e0e287f5-1611-44e0-bc10-8ca467a89dbe" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.199:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 15 09:41:57 crc kubenswrapper[4876]: I1215 09:41:57.439605 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 15 09:41:57 crc kubenswrapper[4876]: I1215 09:41:57.463863 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 15 09:41:58 crc kubenswrapper[4876]: I1215 09:41:58.186493 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.446404 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.447043 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 
09:42:05.448549 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.449793 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.470520 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.471583 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.474927 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 15 09:42:05 crc kubenswrapper[4876]: I1215 09:42:05.476855 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 09:42:06 crc kubenswrapper[4876]: I1215 09:42:06.239607 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 15 09:42:06 crc kubenswrapper[4876]: I1215 09:42:06.242597 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.301703 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h"] Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.305494 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.309257 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.309681 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.310313 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.310356 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.311557 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.311682 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-6cbcn" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.311634 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.316328 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h"] Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471555 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471605 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhz7x\" (UniqueName: \"kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471664 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471685 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471734 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471756 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.471828 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.472014 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.472337 4876 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.472436 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.472466 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574021 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574120 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574188 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574222 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574262 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574294 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhz7x\" (UniqueName: \"kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574352 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574378 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574457 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574481 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.574525 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.575454 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 
09:42:07.575662 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.580657 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.581803 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.581968 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.582264 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.582823 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.583181 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.585495 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: 
\"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.585821 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.593481 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhz7x\" (UniqueName: \"kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:07 crc kubenswrapper[4876]: I1215 09:42:07.627947 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:42:08 crc kubenswrapper[4876]: W1215 09:42:08.288438 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbb71641_2cea_47e7_af95_5dfe1de074d2.slice/crio-88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d WatchSource:0}: Error finding container 88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d: Status 404 returned error can't find the container with id 88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d Dec 15 09:42:08 crc kubenswrapper[4876]: I1215 09:42:08.291345 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:42:08 crc kubenswrapper[4876]: I1215 09:42:08.296238 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h"] Dec 15 09:42:09 crc kubenswrapper[4876]: I1215 09:42:09.270887 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" event={"ID":"dbb71641-2cea-47e7-af95-5dfe1de074d2","Type":"ContainerStarted","Data":"ba33cac4d099776ca7aaf861e615dedcb0687178c3010d5c346854fe15104478"} Dec 15 09:42:09 crc kubenswrapper[4876]: I1215 09:42:09.271282 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" event={"ID":"dbb71641-2cea-47e7-af95-5dfe1de074d2","Type":"ContainerStarted","Data":"88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d"} Dec 15 09:42:09 crc kubenswrapper[4876]: I1215 09:42:09.302987 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" podStartSLOduration=1.7695610240000001 podStartE2EDuration="2.30296566s" podCreationTimestamp="2025-12-15 09:42:07 +0000 UTC" firstStartedPulling="2025-12-15 09:42:08.290827364 +0000 UTC m=+10253.861970275" lastFinishedPulling="2025-12-15 09:42:08.82423198 +0000 UTC m=+10254.395374911" observedRunningTime="2025-12-15 09:42:09.292122589 +0000 UTC m=+10254.863265500" watchObservedRunningTime="2025-12-15 09:42:09.30296566 +0000 UTC m=+10254.874108571" Dec 15 09:42:27 crc 
kubenswrapper[4876]: I1215 09:42:27.323186 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:42:27 crc kubenswrapper[4876]: I1215 09:42:27.323761 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:42:32 crc kubenswrapper[4876]: I1215 09:42:32.183011 4876 scope.go:117] "RemoveContainer" containerID="914958b5838f9e28f2d70ee55eb77773903a4b51d0ca09f26aa803e8b5be6890" Dec 15 09:42:32 crc kubenswrapper[4876]: I1215 09:42:32.205946 4876 scope.go:117] "RemoveContainer" containerID="ba2ac4ece6d57c8b8ac1cc0728c8febb0724b156c6c42a505702645d7f881fab" Dec 15 09:42:57 crc kubenswrapper[4876]: I1215 09:42:57.322623 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:42:57 crc kubenswrapper[4876]: I1215 09:42:57.323299 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:43:27 crc kubenswrapper[4876]: I1215 09:43:27.322766 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:43:27 crc kubenswrapper[4876]: I1215 09:43:27.323416 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:43:27 crc kubenswrapper[4876]: I1215 09:43:27.323466 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:43:27 crc kubenswrapper[4876]: I1215 09:43:27.324476 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:43:27 crc kubenswrapper[4876]: I1215 09:43:27.324562 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" 
containerID="cri-o://8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a" gracePeriod=600 Dec 15 09:43:28 crc kubenswrapper[4876]: I1215 09:43:28.051635 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a" exitCode=0 Dec 15 09:43:28 crc kubenswrapper[4876]: I1215 09:43:28.051714 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a"} Dec 15 09:43:28 crc kubenswrapper[4876]: I1215 09:43:28.052245 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c"} Dec 15 09:43:28 crc kubenswrapper[4876]: I1215 09:43:28.052268 4876 scope.go:117] "RemoveContainer" containerID="56e56a72c4a781ebf998e18e85ccd52cbe6e861a3d8ec687ec823be71bd98aba" Dec 15 09:44:45 crc kubenswrapper[4876]: I1215 09:44:45.818922 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:44:45 crc kubenswrapper[4876]: I1215 09:44:45.822784 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:45 crc kubenswrapper[4876]: I1215 09:44:45.840873 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:44:45 crc kubenswrapper[4876]: I1215 09:44:45.999877 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j925t\" (UniqueName: \"kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.000388 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.000777 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.103151 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.103268 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j925t\" (UniqueName: 
\"kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.103369 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.103736 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.103818 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.143478 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j925t\" (UniqueName: \"kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t\") pod \"redhat-marketplace-lwl47\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.154977 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.723526 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:44:46 crc kubenswrapper[4876]: I1215 09:44:46.887312 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerStarted","Data":"337b0616ef5462401855154178885ab404773169be28d3495d0c1ada95ce18a3"} Dec 15 09:44:47 crc kubenswrapper[4876]: I1215 09:44:47.897348 4876 generic.go:334] "Generic (PLEG): container finished" podID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerID="38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069" exitCode=0 Dec 15 09:44:47 crc kubenswrapper[4876]: I1215 09:44:47.897395 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerDied","Data":"38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069"} Dec 15 09:44:48 crc kubenswrapper[4876]: I1215 09:44:48.913010 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerStarted","Data":"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5"} Dec 15 09:44:49 crc kubenswrapper[4876]: I1215 09:44:49.923756 4876 generic.go:334] "Generic (PLEG): container finished" podID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerID="921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5" exitCode=0 Dec 15 09:44:49 crc kubenswrapper[4876]: I1215 09:44:49.923906 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerDied","Data":"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5"} Dec 15 09:44:50 crc kubenswrapper[4876]: I1215 09:44:50.936675 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerStarted","Data":"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335"} Dec 15 09:44:50 crc kubenswrapper[4876]: I1215 09:44:50.957404 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lwl47" podStartSLOduration=3.300110134 podStartE2EDuration="5.957386841s" podCreationTimestamp="2025-12-15 09:44:45 +0000 UTC" firstStartedPulling="2025-12-15 09:44:47.899001518 +0000 UTC m=+10413.470144429" lastFinishedPulling="2025-12-15 09:44:50.556278225 +0000 UTC m=+10416.127421136" observedRunningTime="2025-12-15 09:44:50.952279154 +0000 UTC m=+10416.523422065" watchObservedRunningTime="2025-12-15 09:44:50.957386841 +0000 UTC m=+10416.528529772" Dec 15 09:44:56 crc kubenswrapper[4876]: I1215 09:44:56.156252 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:56 crc kubenswrapper[4876]: I1215 09:44:56.156822 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:56 crc kubenswrapper[4876]: I1215 09:44:56.207671 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:57 crc kubenswrapper[4876]: I1215 09:44:57.056858 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:57 crc kubenswrapper[4876]: I1215 09:44:57.110835 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:44:58 crc kubenswrapper[4876]: I1215 09:44:58.020144 4876 generic.go:334] "Generic (PLEG): container finished" podID="dbb71641-2cea-47e7-af95-5dfe1de074d2" containerID="ba33cac4d099776ca7aaf861e615dedcb0687178c3010d5c346854fe15104478" exitCode=0 Dec 15 09:44:58 crc kubenswrapper[4876]: I1215 09:44:58.020220 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" event={"ID":"dbb71641-2cea-47e7-af95-5dfe1de074d2","Type":"ContainerDied","Data":"ba33cac4d099776ca7aaf861e615dedcb0687178c3010d5c346854fe15104478"} Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.031298 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lwl47" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="registry-server" containerID="cri-o://5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335" gracePeriod=2 Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.579426 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.589796 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.735513 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.735977 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736054 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736136 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content\") pod \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736194 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key\") pod 
\"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736223 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhz7x\" (UniqueName: \"kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736245 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736282 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j925t\" (UniqueName: \"kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t\") pod \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736308 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736358 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736416 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736471 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities\") pod \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\" (UID: \"196eac71-f21b-48dc-9bd9-e17cb8765bd5\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736513 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.736537 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0\") pod \"dbb71641-2cea-47e7-af95-5dfe1de074d2\" (UID: \"dbb71641-2cea-47e7-af95-5dfe1de074d2\") " Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.745338 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities" (OuterVolumeSpecName: "utilities") pod "196eac71-f21b-48dc-9bd9-e17cb8765bd5" (UID: "196eac71-f21b-48dc-9bd9-e17cb8765bd5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.746076 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t" (OuterVolumeSpecName: "kube-api-access-j925t") pod "196eac71-f21b-48dc-9bd9-e17cb8765bd5" (UID: "196eac71-f21b-48dc-9bd9-e17cb8765bd5"). InnerVolumeSpecName "kube-api-access-j925t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.757623 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph" (OuterVolumeSpecName: "ceph") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.757850 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.788761 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.790947 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.795693 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x" (OuterVolumeSpecName: "kube-api-access-xhz7x") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "kube-api-access-xhz7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.800983 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-cells-global-config-1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.801070 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.802451 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.804021 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "196eac71-f21b-48dc-9bd9-e17cb8765bd5" (UID: "196eac71-f21b-48dc-9bd9-e17cb8765bd5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.817241 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory" (OuterVolumeSpecName: "inventory") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.817506 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839511 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j925t\" (UniqueName: \"kubernetes.io/projected/196eac71-f21b-48dc-9bd9-e17cb8765bd5-kube-api-access-j925t\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839561 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839573 4876 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839585 4876 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-inventory\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839597 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839606 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839615 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839643 4876 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839652 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839660 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/196eac71-f21b-48dc-9bd9-e17cb8765bd5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839669 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839722 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhz7x\" (UniqueName: \"kubernetes.io/projected/dbb71641-2cea-47e7-af95-5dfe1de074d2-kube-api-access-xhz7x\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.839771 4876 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/dbb71641-2cea-47e7-af95-5dfe1de074d2-ceph\") on node \"crc\" DevicePath \"\"" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.840614 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "dbb71641-2cea-47e7-af95-5dfe1de074d2" (UID: "dbb71641-2cea-47e7-af95-5dfe1de074d2"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:44:59 crc kubenswrapper[4876]: I1215 09:44:59.941026 4876 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/dbb71641-2cea-47e7-af95-5dfe1de074d2-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.047962 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.047991 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h" event={"ID":"dbb71641-2cea-47e7-af95-5dfe1de074d2","Type":"ContainerDied","Data":"88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d"} Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.048056 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88c2d0e341a9ab530f51c9b3e4651a6076f93e4128a79480372415067650618d" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.052164 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerDied","Data":"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335"} Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.052086 4876 generic.go:334] "Generic (PLEG): container finished" podID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerID="5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335" exitCode=0 Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.052265 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lwl47" event={"ID":"196eac71-f21b-48dc-9bd9-e17cb8765bd5","Type":"ContainerDied","Data":"337b0616ef5462401855154178885ab404773169be28d3495d0c1ada95ce18a3"} Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.052213 4876 scope.go:117] "RemoveContainer" containerID="5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.052505 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lwl47" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.089761 4876 scope.go:117] "RemoveContainer" containerID="921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.115364 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.127008 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lwl47"] Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.141639 4876 scope.go:117] "RemoveContainer" containerID="38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.186689 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z"] Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.187354 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="registry-server" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187387 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="registry-server" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.187414 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbb71641-2cea-47e7-af95-5dfe1de074d2" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187425 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbb71641-2cea-47e7-af95-5dfe1de074d2" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.187438 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="extract-utilities" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187446 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="extract-utilities" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.187468 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="extract-content" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187476 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="extract-content" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187745 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbb71641-2cea-47e7-af95-5dfe1de074d2" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.187781 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" containerName="registry-server" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.188768 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z"] Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.188872 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.191851 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.192061 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.222203 4876 scope.go:117] "RemoveContainer" containerID="5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.222716 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335\": container with ID starting with 5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335 not found: ID does not exist" containerID="5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.222757 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335"} err="failed to get container status \"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335\": rpc error: code = NotFound desc = could not find container \"5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335\": container with ID starting with 5387122366e02107830d85e7320722850bd7b7a4294b554ef294a1157b24f335 not found: ID does not exist" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.222784 4876 scope.go:117] "RemoveContainer" containerID="921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.223224 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5\": container with ID starting with 921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5 not found: ID does not exist" containerID="921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.223258 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5"} err="failed to get container status \"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5\": rpc error: code = NotFound desc = could not find container \"921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5\": container with ID starting with 921a9e670b6ca4f42552846ac018e153c9b1ec0bd3797bb27febb4903ee669f5 not found: ID does not exist" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.223279 4876 scope.go:117] "RemoveContainer" containerID="38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069" Dec 15 09:45:00 crc kubenswrapper[4876]: E1215 09:45:00.223633 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069\": container with ID starting with 38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069 not found: ID does not exist" 
containerID="38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.223654 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069"} err="failed to get container status \"38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069\": rpc error: code = NotFound desc = could not find container \"38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069\": container with ID starting with 38400a6bf12d7e1bdc5b755047a03cf4455c12ac759afc080d909e73cae95069 not found: ID does not exist" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.347666 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.347705 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58twj\" (UniqueName: \"kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.348024 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.449633 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.449777 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.449807 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58twj\" (UniqueName: \"kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.451355 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume\") pod 
\"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.461760 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.470280 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58twj\" (UniqueName: \"kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj\") pod \"collect-profiles-29429865-7b66z\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.572784 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:00 crc kubenswrapper[4876]: I1215 09:45:00.719055 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="196eac71-f21b-48dc-9bd9-e17cb8765bd5" path="/var/lib/kubelet/pods/196eac71-f21b-48dc-9bd9-e17cb8765bd5/volumes" Dec 15 09:45:01 crc kubenswrapper[4876]: W1215 09:45:01.023797 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb1d9c4c_56d9_4228_95e4_bc10f78dd05a.slice/crio-65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c WatchSource:0}: Error finding container 65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c: Status 404 returned error can't find the container with id 65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c Dec 15 09:45:01 crc kubenswrapper[4876]: I1215 09:45:01.024772 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z"] Dec 15 09:45:01 crc kubenswrapper[4876]: I1215 09:45:01.064311 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" event={"ID":"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a","Type":"ContainerStarted","Data":"65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c"} Dec 15 09:45:02 crc kubenswrapper[4876]: I1215 09:45:02.075387 4876 generic.go:334] "Generic (PLEG): container finished" podID="fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" containerID="a99a0f36c0b39c6ec06642484fb42f216c85d4375131bc758a81e5eaebf4b36a" exitCode=0 Dec 15 09:45:02 crc kubenswrapper[4876]: I1215 09:45:02.075609 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" event={"ID":"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a","Type":"ContainerDied","Data":"a99a0f36c0b39c6ec06642484fb42f216c85d4375131bc758a81e5eaebf4b36a"} Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.425046 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.507671 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume\") pod \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.507771 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume\") pod \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.507948 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58twj\" (UniqueName: \"kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj\") pod \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\" (UID: \"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a\") " Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.508762 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume" (OuterVolumeSpecName: "config-volume") pod "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" (UID: "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.513184 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" (UID: "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.513835 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj" (OuterVolumeSpecName: "kube-api-access-58twj") pod "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" (UID: "fb1d9c4c-56d9-4228-95e4-bc10f78dd05a"). InnerVolumeSpecName "kube-api-access-58twj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.610950 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58twj\" (UniqueName: \"kubernetes.io/projected/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-kube-api-access-58twj\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.611011 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:03 crc kubenswrapper[4876]: I1215 09:45:03.611020 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.102602 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" event={"ID":"fb1d9c4c-56d9-4228-95e4-bc10f78dd05a","Type":"ContainerDied","Data":"65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c"} Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.102942 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65f5981534b9520f16bbe2c9e49f175a1df3432cb1e15161ec78203bca45d26c" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.103025 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.504871 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v"] Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.515475 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429820-x8l9v"] Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.720637 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="547569de-dc02-4f05-b8a6-c85c2ae2bf7c" path="/var/lib/kubelet/pods/547569de-dc02-4f05-b8a6-c85c2ae2bf7c/volumes" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.882774 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:04 crc kubenswrapper[4876]: E1215 09:45:04.883642 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" containerName="collect-profiles" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.883671 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" containerName="collect-profiles" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.883912 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" containerName="collect-profiles" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.885848 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:04 crc kubenswrapper[4876]: I1215 09:45:04.894313 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.037174 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv8h7\" (UniqueName: \"kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.037306 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.037344 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.139658 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv8h7\" (UniqueName: \"kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.139816 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.139865 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.140461 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.140559 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.161768 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hv8h7\" (UniqueName: \"kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7\") pod \"redhat-operators-4p8n8\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.223608 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:05 crc kubenswrapper[4876]: I1215 09:45:05.726301 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:06 crc kubenswrapper[4876]: I1215 09:45:06.130923 4876 generic.go:334] "Generic (PLEG): container finished" podID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerID="ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676" exitCode=0 Dec 15 09:45:06 crc kubenswrapper[4876]: I1215 09:45:06.130996 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerDied","Data":"ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676"} Dec 15 09:45:06 crc kubenswrapper[4876]: I1215 09:45:06.131183 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerStarted","Data":"fbae6529b044aeff60d02396dacce09283732b95b3aa7ffafe56df35ac116ec0"} Dec 15 09:45:08 crc kubenswrapper[4876]: I1215 09:45:08.178670 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerStarted","Data":"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513"} Dec 15 09:45:10 crc kubenswrapper[4876]: I1215 09:45:10.200244 4876 generic.go:334] "Generic (PLEG): container finished" podID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerID="04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513" exitCode=0 Dec 15 09:45:10 crc kubenswrapper[4876]: I1215 09:45:10.200361 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerDied","Data":"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513"} Dec 15 09:45:11 crc kubenswrapper[4876]: I1215 09:45:11.215464 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerStarted","Data":"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844"} Dec 15 09:45:11 crc kubenswrapper[4876]: I1215 09:45:11.243001 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4p8n8" podStartSLOduration=2.602584727 podStartE2EDuration="7.242980653s" podCreationTimestamp="2025-12-15 09:45:04 +0000 UTC" firstStartedPulling="2025-12-15 09:45:06.132882962 +0000 UTC m=+10431.704025873" lastFinishedPulling="2025-12-15 09:45:10.773278878 +0000 UTC m=+10436.344421799" observedRunningTime="2025-12-15 09:45:11.238096492 +0000 UTC m=+10436.809239403" watchObservedRunningTime="2025-12-15 09:45:11.242980653 +0000 UTC m=+10436.814123564" Dec 15 09:45:13 crc kubenswrapper[4876]: E1215 09:45:13.037750 4876 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.70:35144->38.102.83.70:35145: 
write tcp 38.102.83.70:35144->38.102.83.70:35145: write: broken pipe Dec 15 09:45:15 crc kubenswrapper[4876]: I1215 09:45:15.224557 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:15 crc kubenswrapper[4876]: I1215 09:45:15.224624 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:16 crc kubenswrapper[4876]: I1215 09:45:16.276227 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4p8n8" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="registry-server" probeResult="failure" output=< Dec 15 09:45:16 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:45:16 crc kubenswrapper[4876]: > Dec 15 09:45:25 crc kubenswrapper[4876]: I1215 09:45:25.327611 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:25 crc kubenswrapper[4876]: I1215 09:45:25.390796 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:25 crc kubenswrapper[4876]: I1215 09:45:25.575201 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:26 crc kubenswrapper[4876]: I1215 09:45:26.386679 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4p8n8" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="registry-server" containerID="cri-o://7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844" gracePeriod=2 Dec 15 09:45:26 crc kubenswrapper[4876]: I1215 09:45:26.900479 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.049470 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities\") pod \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.049509 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content\") pod \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.049711 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv8h7\" (UniqueName: \"kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7\") pod \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\" (UID: \"0cd32add-6f79-4549-b2a4-e4737ec7e3bd\") " Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.052368 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities" (OuterVolumeSpecName: "utilities") pod "0cd32add-6f79-4549-b2a4-e4737ec7e3bd" (UID: "0cd32add-6f79-4549-b2a4-e4737ec7e3bd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.058949 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7" (OuterVolumeSpecName: "kube-api-access-hv8h7") pod "0cd32add-6f79-4549-b2a4-e4737ec7e3bd" (UID: "0cd32add-6f79-4549-b2a4-e4737ec7e3bd"). InnerVolumeSpecName "kube-api-access-hv8h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.153891 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv8h7\" (UniqueName: \"kubernetes.io/projected/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-kube-api-access-hv8h7\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.153937 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.177759 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0cd32add-6f79-4549-b2a4-e4737ec7e3bd" (UID: "0cd32add-6f79-4549-b2a4-e4737ec7e3bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.254853 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0cd32add-6f79-4549-b2a4-e4737ec7e3bd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.323248 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.323308 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.399269 4876 generic.go:334] "Generic (PLEG): container finished" podID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerID="7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844" exitCode=0 Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.399316 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerDied","Data":"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844"} Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.399353 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p8n8" event={"ID":"0cd32add-6f79-4549-b2a4-e4737ec7e3bd","Type":"ContainerDied","Data":"fbae6529b044aeff60d02396dacce09283732b95b3aa7ffafe56df35ac116ec0"} Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.399371 4876 scope.go:117] "RemoveContainer" 
containerID="7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.399546 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4p8n8" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.434415 4876 scope.go:117] "RemoveContainer" containerID="04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.437763 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.449550 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4p8n8"] Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.473398 4876 scope.go:117] "RemoveContainer" containerID="ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.499992 4876 scope.go:117] "RemoveContainer" containerID="7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844" Dec 15 09:45:27 crc kubenswrapper[4876]: E1215 09:45:27.501871 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844\": container with ID starting with 7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844 not found: ID does not exist" containerID="7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.501906 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844"} err="failed to get container status \"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844\": rpc error: code = NotFound desc = could not find container \"7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844\": container with ID starting with 7d0005b019c8d056ddf238f89bb745da91832450246ed19feb85915f95103844 not found: ID does not exist" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.501927 4876 scope.go:117] "RemoveContainer" containerID="04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513" Dec 15 09:45:27 crc kubenswrapper[4876]: E1215 09:45:27.502409 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513\": container with ID starting with 04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513 not found: ID does not exist" containerID="04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.502440 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513"} err="failed to get container status \"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513\": rpc error: code = NotFound desc = could not find container \"04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513\": container with ID starting with 04c237bda3d4cd7efcbf1d4f471e501abbd399d109f0aa08390fc3dbae1ff513 not found: ID does not exist" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.502460 4876 scope.go:117] "RemoveContainer" 
containerID="ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676" Dec 15 09:45:27 crc kubenswrapper[4876]: E1215 09:45:27.502732 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676\": container with ID starting with ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676 not found: ID does not exist" containerID="ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676" Dec 15 09:45:27 crc kubenswrapper[4876]: I1215 09:45:27.502835 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676"} err="failed to get container status \"ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676\": rpc error: code = NotFound desc = could not find container \"ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676\": container with ID starting with ba491a3e44f7b11e290856f9d367db19f40adfec4a780e5cfb15ed60b0907676 not found: ID does not exist" Dec 15 09:45:28 crc kubenswrapper[4876]: I1215 09:45:28.715761 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" path="/var/lib/kubelet/pods/0cd32add-6f79-4549-b2a4-e4737ec7e3bd/volumes" Dec 15 09:45:32 crc kubenswrapper[4876]: I1215 09:45:32.352070 4876 scope.go:117] "RemoveContainer" containerID="a5ddd3f9ed1bb2267755dccc145e4171483029c27d3539f4053c8b058acd6275" Dec 15 09:45:57 crc kubenswrapper[4876]: I1215 09:45:57.322943 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:45:57 crc kubenswrapper[4876]: I1215 09:45:57.323551 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:46:27 crc kubenswrapper[4876]: I1215 09:46:27.322463 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:46:27 crc kubenswrapper[4876]: I1215 09:46:27.323072 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:46:27 crc kubenswrapper[4876]: I1215 09:46:27.323222 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:46:27 crc kubenswrapper[4876]: I1215 09:46:27.324320 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c"} 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:46:27 crc kubenswrapper[4876]: I1215 09:46:27.324423 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" gracePeriod=600 Dec 15 09:46:27 crc kubenswrapper[4876]: E1215 09:46:27.459554 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:46:28 crc kubenswrapper[4876]: I1215 09:46:28.067904 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" exitCode=0 Dec 15 09:46:28 crc kubenswrapper[4876]: I1215 09:46:28.067948 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c"} Dec 15 09:46:28 crc kubenswrapper[4876]: I1215 09:46:28.067977 4876 scope.go:117] "RemoveContainer" containerID="8fc098a01dff252a30a9a0024f79e3ca6a9041be1255fd833c09624f277ff50a" Dec 15 09:46:28 crc kubenswrapper[4876]: I1215 09:46:28.068637 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:46:28 crc kubenswrapper[4876]: E1215 09:46:28.068876 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:46:31 crc kubenswrapper[4876]: I1215 09:46:31.024813 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 09:46:31 crc kubenswrapper[4876]: I1215 09:46:31.025334 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" containerName="adoption" containerID="cri-o://b1e03640a51ce3ca410ef00197a627012e0f742073fc9b79073b47ea370762be" gracePeriod=30 Dec 15 09:46:39 crc kubenswrapper[4876]: I1215 09:46:39.706324 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:46:39 crc kubenswrapper[4876]: E1215 09:46:39.707298 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:46:54 crc kubenswrapper[4876]: I1215 09:46:54.717380 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:46:54 crc kubenswrapper[4876]: E1215 09:46:54.718082 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.473960 4876 generic.go:334] "Generic (PLEG): container finished" podID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" containerID="b1e03640a51ce3ca410ef00197a627012e0f742073fc9b79073b47ea370762be" exitCode=137 Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.474673 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7f902bcf-51d5-4b82-ac4c-2e30136731a4","Type":"ContainerDied","Data":"b1e03640a51ce3ca410ef00197a627012e0f742073fc9b79073b47ea370762be"} Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.605337 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.709803 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72gtv\" (UniqueName: \"kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv\") pod \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.710645 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") pod \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\" (UID: \"7f902bcf-51d5-4b82-ac4c-2e30136731a4\") " Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.717941 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv" (OuterVolumeSpecName: "kube-api-access-72gtv") pod "7f902bcf-51d5-4b82-ac4c-2e30136731a4" (UID: "7f902bcf-51d5-4b82-ac4c-2e30136731a4"). InnerVolumeSpecName "kube-api-access-72gtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.730387 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9" (OuterVolumeSpecName: "mariadb-data") pod "7f902bcf-51d5-4b82-ac4c-2e30136731a4" (UID: "7f902bcf-51d5-4b82-ac4c-2e30136731a4"). InnerVolumeSpecName "pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.814391 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") on node \"crc\" " Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.814443 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72gtv\" (UniqueName: \"kubernetes.io/projected/7f902bcf-51d5-4b82-ac4c-2e30136731a4-kube-api-access-72gtv\") on node \"crc\" DevicePath \"\"" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.840292 4876 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.840488 4876 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9") on node "crc" Dec 15 09:47:01 crc kubenswrapper[4876]: I1215 09:47:01.916898 4876 reconciler_common.go:293] "Volume detached for volume \"pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-12bffd7a-973f-4ad1-9009-cde69e6f57d9\") on node \"crc\" DevicePath \"\"" Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.484930 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"7f902bcf-51d5-4b82-ac4c-2e30136731a4","Type":"ContainerDied","Data":"0241653a80f1752166964bdaae3489e6a336b04fd2309382cdd6cbd38decdd9f"} Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.484982 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.484987 4876 scope.go:117] "RemoveContainer" containerID="b1e03640a51ce3ca410ef00197a627012e0f742073fc9b79073b47ea370762be" Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.528251 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.537681 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Dec 15 09:47:02 crc kubenswrapper[4876]: I1215 09:47:02.718347 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" path="/var/lib/kubelet/pods/7f902bcf-51d5-4b82-ac4c-2e30136731a4/volumes" Dec 15 09:47:03 crc kubenswrapper[4876]: I1215 09:47:03.242158 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Dec 15 09:47:03 crc kubenswrapper[4876]: I1215 09:47:03.242811 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="83a33547-3699-45b4-bf4f-989b401d6b95" containerName="adoption" containerID="cri-o://27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1" gracePeriod=30 Dec 15 09:47:08 crc kubenswrapper[4876]: I1215 09:47:08.706401 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:47:08 crc kubenswrapper[4876]: E1215 09:47:08.706908 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:19 crc kubenswrapper[4876]: I1215 09:47:19.706118 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:47:19 crc kubenswrapper[4876]: E1215 09:47:19.706905 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:32 crc kubenswrapper[4876]: I1215 09:47:32.706335 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:47:32 crc kubenswrapper[4876]: E1215 09:47:32.707082 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.771849 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.831132 4876 generic.go:334] "Generic (PLEG): container finished" podID="83a33547-3699-45b4-bf4f-989b401d6b95" containerID="27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1" exitCode=137 Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.831181 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"83a33547-3699-45b4-bf4f-989b401d6b95","Type":"ContainerDied","Data":"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1"} Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.831207 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"83a33547-3699-45b4-bf4f-989b401d6b95","Type":"ContainerDied","Data":"e90703f1a548ac1616dede69348295defe6b7ee2d42beeee81ef15369de3e9fe"} Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.831222 4876 scope.go:117] "RemoveContainer" containerID="27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.831336 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.866514 4876 scope.go:117] "RemoveContainer" containerID="27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.867189 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") pod \"83a33547-3699-45b4-bf4f-989b401d6b95\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.867268 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc95f\" (UniqueName: \"kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f\") pod \"83a33547-3699-45b4-bf4f-989b401d6b95\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.867402 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert\") pod \"83a33547-3699-45b4-bf4f-989b401d6b95\" (UID: \"83a33547-3699-45b4-bf4f-989b401d6b95\") " Dec 15 09:47:33 crc kubenswrapper[4876]: E1215 09:47:33.869017 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1\": container with ID starting with 27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1 not found: ID does not exist" containerID="27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.869263 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1"} err="failed to get container status \"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1\": rpc error: code = NotFound desc = could not find container \"27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1\": container with ID starting with 27a2d9cd25a116487ef12448818f06bee56944b02bf52cbbcdc20b58a8b7e9c1 not 
found: ID does not exist" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.875555 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "83a33547-3699-45b4-bf4f-989b401d6b95" (UID: "83a33547-3699-45b4-bf4f-989b401d6b95"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.875751 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f" (OuterVolumeSpecName: "kube-api-access-vc95f") pod "83a33547-3699-45b4-bf4f-989b401d6b95" (UID: "83a33547-3699-45b4-bf4f-989b401d6b95"). InnerVolumeSpecName "kube-api-access-vc95f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.890474 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d" (OuterVolumeSpecName: "ovn-data") pod "83a33547-3699-45b4-bf4f-989b401d6b95" (UID: "83a33547-3699-45b4-bf4f-989b401d6b95"). InnerVolumeSpecName "pvc-24d750a0-2c75-4182-8cf0-8340f499021d". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.970819 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") on node \"crc\" " Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.971252 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc95f\" (UniqueName: \"kubernetes.io/projected/83a33547-3699-45b4-bf4f-989b401d6b95-kube-api-access-vc95f\") on node \"crc\" DevicePath \"\"" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.971267 4876 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/83a33547-3699-45b4-bf4f-989b401d6b95-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.995873 4876 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 15 09:47:33 crc kubenswrapper[4876]: I1215 09:47:33.996047 4876 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-24d750a0-2c75-4182-8cf0-8340f499021d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d") on node "crc" Dec 15 09:47:34 crc kubenswrapper[4876]: I1215 09:47:34.072654 4876 reconciler_common.go:293] "Volume detached for volume \"pvc-24d750a0-2c75-4182-8cf0-8340f499021d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-24d750a0-2c75-4182-8cf0-8340f499021d\") on node \"crc\" DevicePath \"\"" Dec 15 09:47:34 crc kubenswrapper[4876]: I1215 09:47:34.174037 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Dec 15 09:47:34 crc kubenswrapper[4876]: I1215 09:47:34.185904 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Dec 15 09:47:34 crc kubenswrapper[4876]: I1215 09:47:34.719637 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83a33547-3699-45b4-bf4f-989b401d6b95" path="/var/lib/kubelet/pods/83a33547-3699-45b4-bf4f-989b401d6b95/volumes" Dec 15 09:47:45 crc kubenswrapper[4876]: I1215 09:47:45.706213 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:47:45 crc kubenswrapper[4876]: E1215 09:47:45.707671 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.092034 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:47:47 crc kubenswrapper[4876]: E1215 09:47:47.095613 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.095727 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: E1215 09:47:47.095827 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="registry-server" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.095914 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="registry-server" Dec 15 09:47:47 crc kubenswrapper[4876]: E1215 09:47:47.096015 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83a33547-3699-45b4-bf4f-989b401d6b95" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.096122 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="83a33547-3699-45b4-bf4f-989b401d6b95" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: E1215 09:47:47.096224 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="extract-content" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.096446 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" 
containerName="extract-content" Dec 15 09:47:47 crc kubenswrapper[4876]: E1215 09:47:47.097198 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="extract-utilities" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.097290 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="extract-utilities" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.097870 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cd32add-6f79-4549-b2a4-e4737ec7e3bd" containerName="registry-server" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.098011 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="83a33547-3699-45b4-bf4f-989b401d6b95" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.098145 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f902bcf-51d5-4b82-ac4c-2e30136731a4" containerName="adoption" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.100068 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.105712 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.138686 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.138786 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj5fv\" (UniqueName: \"kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.138835 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.240946 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.241024 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj5fv\" (UniqueName: \"kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.241058 4876 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.241603 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.241807 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.259855 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj5fv\" (UniqueName: \"kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv\") pod \"certified-operators-6ds24\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.425160 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:47 crc kubenswrapper[4876]: I1215 09:47:47.980073 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:47:48 crc kubenswrapper[4876]: I1215 09:47:48.982961 4876 generic.go:334] "Generic (PLEG): container finished" podID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerID="52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb" exitCode=0 Dec 15 09:47:48 crc kubenswrapper[4876]: I1215 09:47:48.983092 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerDied","Data":"52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb"} Dec 15 09:47:48 crc kubenswrapper[4876]: I1215 09:47:48.983501 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerStarted","Data":"70147dde33011654af1e8c4a59a2b291890e68188bf7dd5d89650c78a085e997"} Dec 15 09:47:48 crc kubenswrapper[4876]: I1215 09:47:48.986578 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:47:51 crc kubenswrapper[4876]: I1215 09:47:51.002757 4876 generic.go:334] "Generic (PLEG): container finished" podID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerID="f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498" exitCode=0 Dec 15 09:47:51 crc kubenswrapper[4876]: I1215 09:47:51.002830 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerDied","Data":"f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498"} Dec 15 09:47:52 crc kubenswrapper[4876]: I1215 09:47:52.015013 4876 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerStarted","Data":"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9"} Dec 15 09:47:52 crc kubenswrapper[4876]: I1215 09:47:52.035881 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6ds24" podStartSLOduration=2.357048581 podStartE2EDuration="5.035858555s" podCreationTimestamp="2025-12-15 09:47:47 +0000 UTC" firstStartedPulling="2025-12-15 09:47:48.986317279 +0000 UTC m=+10594.557460190" lastFinishedPulling="2025-12-15 09:47:51.665127243 +0000 UTC m=+10597.236270164" observedRunningTime="2025-12-15 09:47:52.032766232 +0000 UTC m=+10597.603909153" watchObservedRunningTime="2025-12-15 09:47:52.035858555 +0000 UTC m=+10597.607001486" Dec 15 09:47:56 crc kubenswrapper[4876]: I1215 09:47:56.706981 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:47:56 crc kubenswrapper[4876]: E1215 09:47:56.708468 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:47:57 crc kubenswrapper[4876]: I1215 09:47:57.425487 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:57 crc kubenswrapper[4876]: I1215 09:47:57.425574 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:57 crc kubenswrapper[4876]: I1215 09:47:57.476227 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:58 crc kubenswrapper[4876]: I1215 09:47:58.121286 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:47:58 crc kubenswrapper[4876]: I1215 09:47:58.184533 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.096974 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6ds24" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="registry-server" containerID="cri-o://3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9" gracePeriod=2 Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.587319 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.715399 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities\") pod \"f8b5a56b-3e19-4778-8375-80efb69e11ac\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.715698 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content\") pod \"f8b5a56b-3e19-4778-8375-80efb69e11ac\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.715756 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj5fv\" (UniqueName: \"kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv\") pod \"f8b5a56b-3e19-4778-8375-80efb69e11ac\" (UID: \"f8b5a56b-3e19-4778-8375-80efb69e11ac\") " Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.716593 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities" (OuterVolumeSpecName: "utilities") pod "f8b5a56b-3e19-4778-8375-80efb69e11ac" (UID: "f8b5a56b-3e19-4778-8375-80efb69e11ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.721089 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv" (OuterVolumeSpecName: "kube-api-access-bj5fv") pod "f8b5a56b-3e19-4778-8375-80efb69e11ac" (UID: "f8b5a56b-3e19-4778-8375-80efb69e11ac"). InnerVolumeSpecName "kube-api-access-bj5fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.818789 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj5fv\" (UniqueName: \"kubernetes.io/projected/f8b5a56b-3e19-4778-8375-80efb69e11ac-kube-api-access-bj5fv\") on node \"crc\" DevicePath \"\"" Dec 15 09:48:00 crc kubenswrapper[4876]: I1215 09:48:00.818826 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.062027 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8b5a56b-3e19-4778-8375-80efb69e11ac" (UID: "f8b5a56b-3e19-4778-8375-80efb69e11ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.110690 4876 generic.go:334] "Generic (PLEG): container finished" podID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerID="3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9" exitCode=0 Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.110747 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6ds24" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.110743 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerDied","Data":"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9"} Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.110893 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6ds24" event={"ID":"f8b5a56b-3e19-4778-8375-80efb69e11ac","Type":"ContainerDied","Data":"70147dde33011654af1e8c4a59a2b291890e68188bf7dd5d89650c78a085e997"} Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.110925 4876 scope.go:117] "RemoveContainer" containerID="3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.125639 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b5a56b-3e19-4778-8375-80efb69e11ac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.134731 4876 scope.go:117] "RemoveContainer" containerID="f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.153694 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.167779 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6ds24"] Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.169494 4876 scope.go:117] "RemoveContainer" containerID="52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.221269 4876 scope.go:117] "RemoveContainer" containerID="3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9" Dec 15 09:48:01 crc kubenswrapper[4876]: E1215 09:48:01.221905 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9\": container with ID starting with 3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9 not found: ID does not exist" containerID="3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.221946 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9"} err="failed to get container status \"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9\": rpc error: code = NotFound desc = could not find container \"3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9\": container with ID starting with 3c29eb6fee246c6e4cf5ecdf4e74956851f3957539321e32a8481707608735c9 not found: ID does not exist" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.221976 4876 scope.go:117] "RemoveContainer" containerID="f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498" Dec 15 09:48:01 crc kubenswrapper[4876]: E1215 09:48:01.222736 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498\": 
container with ID starting with f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498 not found: ID does not exist" containerID="f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.222796 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498"} err="failed to get container status \"f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498\": rpc error: code = NotFound desc = could not find container \"f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498\": container with ID starting with f51b6ab157a876d6479f99bf654c33dcf30f63a680e9b4476051782fd45b9498 not found: ID does not exist" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.222828 4876 scope.go:117] "RemoveContainer" containerID="52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb" Dec 15 09:48:01 crc kubenswrapper[4876]: E1215 09:48:01.223220 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb\": container with ID starting with 52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb not found: ID does not exist" containerID="52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb" Dec 15 09:48:01 crc kubenswrapper[4876]: I1215 09:48:01.223250 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb"} err="failed to get container status \"52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb\": rpc error: code = NotFound desc = could not find container \"52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb\": container with ID starting with 52c318605cbeb6c6c8db07a42e85a09fac45a832a503415092574e4888d526fb not found: ID does not exist" Dec 15 09:48:02 crc kubenswrapper[4876]: I1215 09:48:02.718611 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" path="/var/lib/kubelet/pods/f8b5a56b-3e19-4778-8375-80efb69e11ac/volumes" Dec 15 09:48:09 crc kubenswrapper[4876]: I1215 09:48:09.705849 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:48:09 crc kubenswrapper[4876]: E1215 09:48:09.706487 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:48:21 crc kubenswrapper[4876]: I1215 09:48:21.706147 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:48:21 crc kubenswrapper[4876]: E1215 09:48:21.706919 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:48:36 crc kubenswrapper[4876]: I1215 09:48:36.706178 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:48:36 crc kubenswrapper[4876]: E1215 09:48:36.707122 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:48:50 crc kubenswrapper[4876]: I1215 09:48:50.706627 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:48:50 crc kubenswrapper[4876]: E1215 09:48:50.707515 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:49:05 crc kubenswrapper[4876]: I1215 09:49:05.706381 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:49:05 crc kubenswrapper[4876]: E1215 09:49:05.707326 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:49:16 crc kubenswrapper[4876]: I1215 09:49:16.706064 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:49:16 crc kubenswrapper[4876]: E1215 09:49:16.708727 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:49:28 crc kubenswrapper[4876]: I1215 09:49:28.706205 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:49:28 crc kubenswrapper[4876]: E1215 09:49:28.708415 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:49:43 crc kubenswrapper[4876]: I1215 09:49:43.707325 4876 
scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:49:43 crc kubenswrapper[4876]: E1215 09:49:43.708728 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:49:56 crc kubenswrapper[4876]: I1215 09:49:56.706161 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:49:56 crc kubenswrapper[4876]: E1215 09:49:56.708237 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:50:10 crc kubenswrapper[4876]: I1215 09:50:10.705491 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:50:10 crc kubenswrapper[4876]: E1215 09:50:10.706641 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:50:21 crc kubenswrapper[4876]: I1215 09:50:21.706848 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:50:21 crc kubenswrapper[4876]: E1215 09:50:21.710304 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:50:33 crc kubenswrapper[4876]: I1215 09:50:33.705583 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:50:33 crc kubenswrapper[4876]: E1215 09:50:33.706585 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:50:44 crc kubenswrapper[4876]: I1215 09:50:44.714686 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:50:44 crc kubenswrapper[4876]: E1215 09:50:44.715786 4876 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:50:58 crc kubenswrapper[4876]: I1215 09:50:58.705861 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:50:58 crc kubenswrapper[4876]: E1215 09:50:58.706882 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:51:12 crc kubenswrapper[4876]: I1215 09:51:12.707412 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:51:12 crc kubenswrapper[4876]: E1215 09:51:12.708245 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:51:26 crc kubenswrapper[4876]: I1215 09:51:26.707060 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:51:26 crc kubenswrapper[4876]: E1215 09:51:26.707901 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.627120 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:36 crc kubenswrapper[4876]: E1215 09:51:36.628012 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="registry-server" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.628032 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="registry-server" Dec 15 09:51:36 crc kubenswrapper[4876]: E1215 09:51:36.628050 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="extract-utilities" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.628057 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="extract-utilities" Dec 15 09:51:36 crc kubenswrapper[4876]: E1215 09:51:36.628075 4876 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="extract-content" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.628083 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="extract-content" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.628299 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8b5a56b-3e19-4778-8375-80efb69e11ac" containerName="registry-server" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.629707 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.640145 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.779084 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r4rh\" (UniqueName: \"kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.779163 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.779907 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.881865 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.881982 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r4rh\" (UniqueName: \"kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.882008 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.882417 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities\") pod \"community-operators-mwp9w\" 
(UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.882674 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.904480 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r4rh\" (UniqueName: \"kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh\") pod \"community-operators-mwp9w\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:36 crc kubenswrapper[4876]: I1215 09:51:36.959910 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:37 crc kubenswrapper[4876]: I1215 09:51:37.490022 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:37 crc kubenswrapper[4876]: I1215 09:51:37.509498 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerStarted","Data":"4f70b3d5585037ad1511985fe6a2287039854a6b276f1e0d86f5de7f22add8b9"} Dec 15 09:51:38 crc kubenswrapper[4876]: I1215 09:51:38.525426 4876 generic.go:334] "Generic (PLEG): container finished" podID="cebee846-330f-437d-a20f-1039e98490b4" containerID="02f1731bf581f7a793a99a5cd6e7a94628dfd2aed59ca3805d0255e35386c31c" exitCode=0 Dec 15 09:51:38 crc kubenswrapper[4876]: I1215 09:51:38.525507 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerDied","Data":"02f1731bf581f7a793a99a5cd6e7a94628dfd2aed59ca3805d0255e35386c31c"} Dec 15 09:51:40 crc kubenswrapper[4876]: I1215 09:51:40.550904 4876 generic.go:334] "Generic (PLEG): container finished" podID="cebee846-330f-437d-a20f-1039e98490b4" containerID="b8542d1c8ea43d3fb260cae590eb07f3a8fc892cdedebaff0e47498c5ab1cebc" exitCode=0 Dec 15 09:51:40 crc kubenswrapper[4876]: I1215 09:51:40.550961 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerDied","Data":"b8542d1c8ea43d3fb260cae590eb07f3a8fc892cdedebaff0e47498c5ab1cebc"} Dec 15 09:51:40 crc kubenswrapper[4876]: I1215 09:51:40.705732 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:51:41 crc kubenswrapper[4876]: I1215 09:51:41.567379 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822"} Dec 15 09:51:41 crc kubenswrapper[4876]: I1215 09:51:41.572468 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" 
event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerStarted","Data":"bef2f82a0700da52be62f0ab90d6f02ecfa01affa6a20a631c0d179fb4a306ac"} Dec 15 09:51:41 crc kubenswrapper[4876]: I1215 09:51:41.627997 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mwp9w" podStartSLOduration=3.04162522 podStartE2EDuration="5.62792461s" podCreationTimestamp="2025-12-15 09:51:36 +0000 UTC" firstStartedPulling="2025-12-15 09:51:38.528079581 +0000 UTC m=+10824.099222492" lastFinishedPulling="2025-12-15 09:51:41.114378951 +0000 UTC m=+10826.685521882" observedRunningTime="2025-12-15 09:51:41.609524556 +0000 UTC m=+10827.180667487" watchObservedRunningTime="2025-12-15 09:51:41.62792461 +0000 UTC m=+10827.199067541" Dec 15 09:51:46 crc kubenswrapper[4876]: I1215 09:51:46.960928 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:46 crc kubenswrapper[4876]: I1215 09:51:46.961470 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:47 crc kubenswrapper[4876]: I1215 09:51:47.058382 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:47 crc kubenswrapper[4876]: I1215 09:51:47.685190 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:47 crc kubenswrapper[4876]: I1215 09:51:47.749881 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:49 crc kubenswrapper[4876]: I1215 09:51:49.665906 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mwp9w" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="registry-server" containerID="cri-o://bef2f82a0700da52be62f0ab90d6f02ecfa01affa6a20a631c0d179fb4a306ac" gracePeriod=2 Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.679061 4876 generic.go:334] "Generic (PLEG): container finished" podID="cebee846-330f-437d-a20f-1039e98490b4" containerID="bef2f82a0700da52be62f0ab90d6f02ecfa01affa6a20a631c0d179fb4a306ac" exitCode=0 Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.679152 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerDied","Data":"bef2f82a0700da52be62f0ab90d6f02ecfa01affa6a20a631c0d179fb4a306ac"} Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.679430 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwp9w" event={"ID":"cebee846-330f-437d-a20f-1039e98490b4","Type":"ContainerDied","Data":"4f70b3d5585037ad1511985fe6a2287039854a6b276f1e0d86f5de7f22add8b9"} Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.679448 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f70b3d5585037ad1511985fe6a2287039854a6b276f1e0d86f5de7f22add8b9" Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.750340 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.908591 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities\") pod \"cebee846-330f-437d-a20f-1039e98490b4\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.908971 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5r4rh\" (UniqueName: \"kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh\") pod \"cebee846-330f-437d-a20f-1039e98490b4\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.909124 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content\") pod \"cebee846-330f-437d-a20f-1039e98490b4\" (UID: \"cebee846-330f-437d-a20f-1039e98490b4\") " Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.909673 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities" (OuterVolumeSpecName: "utilities") pod "cebee846-330f-437d-a20f-1039e98490b4" (UID: "cebee846-330f-437d-a20f-1039e98490b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.924074 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh" (OuterVolumeSpecName: "kube-api-access-5r4rh") pod "cebee846-330f-437d-a20f-1039e98490b4" (UID: "cebee846-330f-437d-a20f-1039e98490b4"). InnerVolumeSpecName "kube-api-access-5r4rh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:51:50 crc kubenswrapper[4876]: I1215 09:51:50.982786 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cebee846-330f-437d-a20f-1039e98490b4" (UID: "cebee846-330f-437d-a20f-1039e98490b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.011183 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.011219 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5r4rh\" (UniqueName: \"kubernetes.io/projected/cebee846-330f-437d-a20f-1039e98490b4-kube-api-access-5r4rh\") on node \"crc\" DevicePath \"\"" Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.011229 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cebee846-330f-437d-a20f-1039e98490b4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.692252 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwp9w" Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.745701 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:51 crc kubenswrapper[4876]: I1215 09:51:51.759917 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mwp9w"] Dec 15 09:51:52 crc kubenswrapper[4876]: I1215 09:51:52.721835 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cebee846-330f-437d-a20f-1039e98490b4" path="/var/lib/kubelet/pods/cebee846-330f-437d-a20f-1039e98490b4/volumes" Dec 15 09:53:57 crc kubenswrapper[4876]: I1215 09:53:57.323288 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:53:57 crc kubenswrapper[4876]: I1215 09:53:57.324031 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:54:27 crc kubenswrapper[4876]: I1215 09:54:27.323254 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:54:27 crc kubenswrapper[4876]: I1215 09:54:27.323959 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.323528 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.323997 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.324057 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.324868 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.324920 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822" gracePeriod=600 Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.784481 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822" exitCode=0 Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.784522 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822"} Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.784866 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28"} Dec 15 09:54:57 crc kubenswrapper[4876]: I1215 09:54:57.784889 4876 scope.go:117] "RemoveContainer" containerID="a3bde17b4cd600c7b59e14a6f47afd6ce3d1071f985c279b3faa4591f0d1be5c" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.396122 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:26 crc kubenswrapper[4876]: E1215 09:55:26.397252 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="registry-server" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.397275 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="registry-server" Dec 15 09:55:26 crc kubenswrapper[4876]: E1215 09:55:26.397289 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="extract-content" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.397299 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="extract-content" Dec 15 09:55:26 crc kubenswrapper[4876]: E1215 09:55:26.397311 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="extract-utilities" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.397319 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="extract-utilities" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.397593 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="cebee846-330f-437d-a20f-1039e98490b4" containerName="registry-server" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.399405 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.415195 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.569117 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.569464 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.569633 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrtvx\" (UniqueName: \"kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.671336 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.671697 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.671862 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrtvx\" (UniqueName: \"kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.672038 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.672093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.694078 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wrtvx\" (UniqueName: \"kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx\") pod \"redhat-operators-bdct9\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:26 crc kubenswrapper[4876]: I1215 09:55:26.727241 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:27 crc kubenswrapper[4876]: I1215 09:55:27.197526 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.111898 4876 generic.go:334] "Generic (PLEG): container finished" podID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerID="a89f6ef35cbdf2f432866d8afe4118b875edd29370b0f34d8035b5b7d0d3832a" exitCode=0 Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.111951 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerDied","Data":"a89f6ef35cbdf2f432866d8afe4118b875edd29370b0f34d8035b5b7d0d3832a"} Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.112276 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerStarted","Data":"c643f7e603bd7c215542c49e4a9e2168d9da086786a15e24620910b1741fdad5"} Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.113995 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.207358 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.210862 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.220563 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.309796 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.310462 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs686\" (UniqueName: \"kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.310560 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.412356 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs686\" (UniqueName: \"kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.412413 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.412447 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.413092 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.413377 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.439201 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gs686\" (UniqueName: \"kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686\") pod \"redhat-marketplace-wsdgt\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:28 crc kubenswrapper[4876]: I1215 09:55:28.535318 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:29 crc kubenswrapper[4876]: I1215 09:55:29.045164 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:29 crc kubenswrapper[4876]: W1215 09:55:29.047464 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc82c1a9_5eff_4559_a909_d53b3bc90752.slice/crio-4c5306a5254efb8f5d748b0fd811d770f4bc958c4368d75aa3a186f8e017bcfd WatchSource:0}: Error finding container 4c5306a5254efb8f5d748b0fd811d770f4bc958c4368d75aa3a186f8e017bcfd: Status 404 returned error can't find the container with id 4c5306a5254efb8f5d748b0fd811d770f4bc958c4368d75aa3a186f8e017bcfd Dec 15 09:55:29 crc kubenswrapper[4876]: I1215 09:55:29.122580 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerStarted","Data":"4c5306a5254efb8f5d748b0fd811d770f4bc958c4368d75aa3a186f8e017bcfd"} Dec 15 09:55:30 crc kubenswrapper[4876]: I1215 09:55:30.135619 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerStarted","Data":"1e559980779b44bbb779fb220bcd9b4a58b761476cc1f021041f562d5377842c"} Dec 15 09:55:30 crc kubenswrapper[4876]: I1215 09:55:30.139084 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerID="77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a" exitCode=0 Dec 15 09:55:30 crc kubenswrapper[4876]: I1215 09:55:30.139141 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerDied","Data":"77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a"} Dec 15 09:55:31 crc kubenswrapper[4876]: I1215 09:55:31.163117 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerStarted","Data":"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b"} Dec 15 09:55:33 crc kubenswrapper[4876]: I1215 09:55:33.183881 4876 generic.go:334] "Generic (PLEG): container finished" podID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerID="1e559980779b44bbb779fb220bcd9b4a58b761476cc1f021041f562d5377842c" exitCode=0 Dec 15 09:55:33 crc kubenswrapper[4876]: I1215 09:55:33.183978 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerDied","Data":"1e559980779b44bbb779fb220bcd9b4a58b761476cc1f021041f562d5377842c"} Dec 15 09:55:33 crc kubenswrapper[4876]: I1215 09:55:33.187838 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerID="67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b" 
exitCode=0 Dec 15 09:55:33 crc kubenswrapper[4876]: I1215 09:55:33.187881 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerDied","Data":"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b"} Dec 15 09:55:34 crc kubenswrapper[4876]: I1215 09:55:34.198415 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerStarted","Data":"04491ca707fcf763acc117203f810317287c1eb66df083d1f6942b1c5878d741"} Dec 15 09:55:34 crc kubenswrapper[4876]: I1215 09:55:34.200993 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerStarted","Data":"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309"} Dec 15 09:55:34 crc kubenswrapper[4876]: I1215 09:55:34.226796 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bdct9" podStartSLOduration=2.378298842 podStartE2EDuration="8.226775661s" podCreationTimestamp="2025-12-15 09:55:26 +0000 UTC" firstStartedPulling="2025-12-15 09:55:28.113747554 +0000 UTC m=+11053.684890455" lastFinishedPulling="2025-12-15 09:55:33.962224363 +0000 UTC m=+11059.533367274" observedRunningTime="2025-12-15 09:55:34.219818895 +0000 UTC m=+11059.790961806" watchObservedRunningTime="2025-12-15 09:55:34.226775661 +0000 UTC m=+11059.797918592" Dec 15 09:55:34 crc kubenswrapper[4876]: I1215 09:55:34.242848 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wsdgt" podStartSLOduration=2.619869046 podStartE2EDuration="6.242832641s" podCreationTimestamp="2025-12-15 09:55:28 +0000 UTC" firstStartedPulling="2025-12-15 09:55:30.140416451 +0000 UTC m=+11055.711559362" lastFinishedPulling="2025-12-15 09:55:33.763380036 +0000 UTC m=+11059.334522957" observedRunningTime="2025-12-15 09:55:34.237485567 +0000 UTC m=+11059.808628478" watchObservedRunningTime="2025-12-15 09:55:34.242832641 +0000 UTC m=+11059.813975552" Dec 15 09:55:36 crc kubenswrapper[4876]: I1215 09:55:36.728745 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:36 crc kubenswrapper[4876]: I1215 09:55:36.730657 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:37 crc kubenswrapper[4876]: I1215 09:55:37.787664 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-bdct9" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="registry-server" probeResult="failure" output=< Dec 15 09:55:37 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 09:55:37 crc kubenswrapper[4876]: > Dec 15 09:55:38 crc kubenswrapper[4876]: I1215 09:55:38.535798 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:38 crc kubenswrapper[4876]: I1215 09:55:38.535846 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:38 crc kubenswrapper[4876]: I1215 09:55:38.596677 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:39 crc kubenswrapper[4876]: I1215 09:55:39.306622 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:39 crc kubenswrapper[4876]: I1215 09:55:39.805182 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:41 crc kubenswrapper[4876]: I1215 09:55:41.268264 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wsdgt" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="registry-server" containerID="cri-o://0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309" gracePeriod=2 Dec 15 09:55:41 crc kubenswrapper[4876]: I1215 09:55:41.840592 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.016385 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content\") pod \"dc82c1a9-5eff-4559-a909-d53b3bc90752\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.016567 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs686\" (UniqueName: \"kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686\") pod \"dc82c1a9-5eff-4559-a909-d53b3bc90752\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.016640 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities\") pod \"dc82c1a9-5eff-4559-a909-d53b3bc90752\" (UID: \"dc82c1a9-5eff-4559-a909-d53b3bc90752\") " Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.017591 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities" (OuterVolumeSpecName: "utilities") pod "dc82c1a9-5eff-4559-a909-d53b3bc90752" (UID: "dc82c1a9-5eff-4559-a909-d53b3bc90752"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.022183 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686" (OuterVolumeSpecName: "kube-api-access-gs686") pod "dc82c1a9-5eff-4559-a909-d53b3bc90752" (UID: "dc82c1a9-5eff-4559-a909-d53b3bc90752"). InnerVolumeSpecName "kube-api-access-gs686". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.050164 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc82c1a9-5eff-4559-a909-d53b3bc90752" (UID: "dc82c1a9-5eff-4559-a909-d53b3bc90752"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.119319 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.119351 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs686\" (UniqueName: \"kubernetes.io/projected/dc82c1a9-5eff-4559-a909-d53b3bc90752-kube-api-access-gs686\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.119363 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc82c1a9-5eff-4559-a909-d53b3bc90752-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.278673 4876 generic.go:334] "Generic (PLEG): container finished" podID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerID="0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309" exitCode=0 Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.278709 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerDied","Data":"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309"} Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.278741 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wsdgt" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.278772 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wsdgt" event={"ID":"dc82c1a9-5eff-4559-a909-d53b3bc90752","Type":"ContainerDied","Data":"4c5306a5254efb8f5d748b0fd811d770f4bc958c4368d75aa3a186f8e017bcfd"} Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.278795 4876 scope.go:117] "RemoveContainer" containerID="0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.304758 4876 scope.go:117] "RemoveContainer" containerID="67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.313711 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.322509 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wsdgt"] Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.325890 4876 scope.go:117] "RemoveContainer" containerID="77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.375732 4876 scope.go:117] "RemoveContainer" containerID="0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309" Dec 15 09:55:42 crc kubenswrapper[4876]: E1215 09:55:42.376088 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309\": container with ID starting with 0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309 not found: ID does not exist" containerID="0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.376141 4876 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309"} err="failed to get container status \"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309\": rpc error: code = NotFound desc = could not find container \"0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309\": container with ID starting with 0aee68d25367ca69891cef39f4a52d73565b96b2709f78499ba9256a4d088309 not found: ID does not exist" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.376170 4876 scope.go:117] "RemoveContainer" containerID="67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b" Dec 15 09:55:42 crc kubenswrapper[4876]: E1215 09:55:42.376718 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b\": container with ID starting with 67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b not found: ID does not exist" containerID="67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.376758 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b"} err="failed to get container status \"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b\": rpc error: code = NotFound desc = could not find container \"67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b\": container with ID starting with 67356a2e5b28450e4a784198adfe62849b342f68f038ce875739ffeb857ea52b not found: ID does not exist" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.376778 4876 scope.go:117] "RemoveContainer" containerID="77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a" Dec 15 09:55:42 crc kubenswrapper[4876]: E1215 09:55:42.377115 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a\": container with ID starting with 77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a not found: ID does not exist" containerID="77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.377144 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a"} err="failed to get container status \"77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a\": rpc error: code = NotFound desc = could not find container \"77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a\": container with ID starting with 77f2eeedb1758f9c7425e47fdf6041b7e91bf25c221dea003899131d67fdc06a not found: ID does not exist" Dec 15 09:55:42 crc kubenswrapper[4876]: I1215 09:55:42.721274 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" path="/var/lib/kubelet/pods/dc82c1a9-5eff-4559-a909-d53b3bc90752/volumes" Dec 15 09:55:46 crc kubenswrapper[4876]: I1215 09:55:46.779932 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:46 crc kubenswrapper[4876]: I1215 09:55:46.841733 4876 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:47 crc kubenswrapper[4876]: I1215 09:55:47.015366 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:48 crc kubenswrapper[4876]: I1215 09:55:48.335420 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bdct9" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="registry-server" containerID="cri-o://04491ca707fcf763acc117203f810317287c1eb66df083d1f6942b1c5878d741" gracePeriod=2 Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.348517 4876 generic.go:334] "Generic (PLEG): container finished" podID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerID="04491ca707fcf763acc117203f810317287c1eb66df083d1f6942b1c5878d741" exitCode=0 Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.348586 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerDied","Data":"04491ca707fcf763acc117203f810317287c1eb66df083d1f6942b1c5878d741"} Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.461933 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.610380 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrtvx\" (UniqueName: \"kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx\") pod \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.610530 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content\") pod \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.610614 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities\") pod \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\" (UID: \"0fd4b92c-ea48-48c8-96a3-6be4685d0825\") " Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.611540 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities" (OuterVolumeSpecName: "utilities") pod "0fd4b92c-ea48-48c8-96a3-6be4685d0825" (UID: "0fd4b92c-ea48-48c8-96a3-6be4685d0825"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.621625 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx" (OuterVolumeSpecName: "kube-api-access-wrtvx") pod "0fd4b92c-ea48-48c8-96a3-6be4685d0825" (UID: "0fd4b92c-ea48-48c8-96a3-6be4685d0825"). InnerVolumeSpecName "kube-api-access-wrtvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.712838 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.713088 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrtvx\" (UniqueName: \"kubernetes.io/projected/0fd4b92c-ea48-48c8-96a3-6be4685d0825-kube-api-access-wrtvx\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.764797 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0fd4b92c-ea48-48c8-96a3-6be4685d0825" (UID: "0fd4b92c-ea48-48c8-96a3-6be4685d0825"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:55:49 crc kubenswrapper[4876]: I1215 09:55:49.815259 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0fd4b92c-ea48-48c8-96a3-6be4685d0825-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.360063 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bdct9" event={"ID":"0fd4b92c-ea48-48c8-96a3-6be4685d0825","Type":"ContainerDied","Data":"c643f7e603bd7c215542c49e4a9e2168d9da086786a15e24620910b1741fdad5"} Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.360125 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bdct9" Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.360141 4876 scope.go:117] "RemoveContainer" containerID="04491ca707fcf763acc117203f810317287c1eb66df083d1f6942b1c5878d741" Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.391867 4876 scope.go:117] "RemoveContainer" containerID="1e559980779b44bbb779fb220bcd9b4a58b761476cc1f021041f562d5377842c" Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.411454 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.420709 4876 scope.go:117] "RemoveContainer" containerID="a89f6ef35cbdf2f432866d8afe4118b875edd29370b0f34d8035b5b7d0d3832a" Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.421545 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bdct9"] Dec 15 09:55:50 crc kubenswrapper[4876]: I1215 09:55:50.717773 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" path="/var/lib/kubelet/pods/0fd4b92c-ea48-48c8-96a3-6be4685d0825/volumes" Dec 15 09:56:57 crc kubenswrapper[4876]: I1215 09:56:57.323030 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:56:57 crc kubenswrapper[4876]: I1215 09:56:57.323664 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:57:27 crc kubenswrapper[4876]: I1215 09:57:27.323155 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:57:27 crc kubenswrapper[4876]: I1215 09:57:27.323677 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.092254 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093756 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="extract-content" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093780 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="extract-content" Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093835 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="extract-content" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093844 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="extract-content" Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093889 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="extract-utilities" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093902 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="extract-utilities" Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093923 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093932 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093951 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="extract-utilities" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093959 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="extract-utilities" Dec 15 09:57:54 crc kubenswrapper[4876]: E1215 09:57:54.093988 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.093997 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.094616 4876 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="dc82c1a9-5eff-4559-a909-d53b3bc90752" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.094649 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fd4b92c-ea48-48c8-96a3-6be4685d0825" containerName="registry-server" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.095940 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.098519 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.101269 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.101905 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qgf6m" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.102339 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.103168 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.206922 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzzz5\" (UniqueName: \"kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207333 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207450 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207500 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207604 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207759 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207806 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207832 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.207935 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309223 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309292 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309312 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309353 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309375 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309424 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309478 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzzz5\" (UniqueName: \"kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309505 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.309905 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.310910 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.311117 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.311882 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.311974 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.315430 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " 
pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.315864 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.318083 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.340441 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.341606 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzzz5\" (UniqueName: \"kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5\") pod \"tempest-tests-tempest\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.430214 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 15 09:57:54 crc kubenswrapper[4876]: I1215 09:57:54.879054 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 15 09:57:55 crc kubenswrapper[4876]: I1215 09:57:55.610929 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7e4c7322-1b07-4628-85ab-5f72f6f44e04","Type":"ContainerStarted","Data":"36e0c591e8c5d4d43c9f97305838e56b6a8419222b2b4caa58c8b542f3c26fca"} Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.322939 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.323372 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.323428 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.324267 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 09:57:57 crc 
kubenswrapper[4876]: I1215 09:57:57.324325 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" gracePeriod=600 Dec 15 09:57:57 crc kubenswrapper[4876]: E1215 09:57:57.462060 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.633956 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" exitCode=0 Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.634006 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28"} Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.634040 4876 scope.go:117] "RemoveContainer" containerID="e2c6ad27d6269068a442b20ce143980a63289eb9976894cf50b1e429e62ae822" Dec 15 09:57:57 crc kubenswrapper[4876]: I1215 09:57:57.635092 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:57:57 crc kubenswrapper[4876]: E1215 09:57:57.635393 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.467165 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t246z"] Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.470357 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.476974 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t246z"] Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.658097 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-catalog-content\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.658185 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-utilities\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.658554 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9smlw\" (UniqueName: \"kubernetes.io/projected/7aa3a255-de24-4937-8163-97efcdd0caed-kube-api-access-9smlw\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.761081 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-catalog-content\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.761168 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-utilities\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.761396 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9smlw\" (UniqueName: \"kubernetes.io/projected/7aa3a255-de24-4937-8163-97efcdd0caed-kube-api-access-9smlw\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.761585 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-catalog-content\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.762255 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aa3a255-de24-4937-8163-97efcdd0caed-utilities\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.810701 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9smlw\" (UniqueName: \"kubernetes.io/projected/7aa3a255-de24-4937-8163-97efcdd0caed-kube-api-access-9smlw\") pod \"certified-operators-t246z\" (UID: \"7aa3a255-de24-4937-8163-97efcdd0caed\") " pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:07 crc kubenswrapper[4876]: I1215 09:58:07.843224 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:08 crc kubenswrapper[4876]: I1215 09:58:08.431332 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t246z"] Dec 15 09:58:08 crc kubenswrapper[4876]: I1215 09:58:08.808254 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t246z" event={"ID":"7aa3a255-de24-4937-8163-97efcdd0caed","Type":"ContainerStarted","Data":"d9af1a444123e29900e6c7d945a202767e66d3677cbfdfd7e7a05b12efca650c"} Dec 15 09:58:09 crc kubenswrapper[4876]: I1215 09:58:09.706651 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:58:09 crc kubenswrapper[4876]: E1215 09:58:09.707201 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:58:09 crc kubenswrapper[4876]: I1215 09:58:09.822659 4876 generic.go:334] "Generic (PLEG): container finished" podID="7aa3a255-de24-4937-8163-97efcdd0caed" containerID="09c9569d2d4d31d440b86e986b89fb02be95021f58bfbdbae72f22611dc4dbab" exitCode=0 Dec 15 09:58:09 crc kubenswrapper[4876]: I1215 09:58:09.822742 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t246z" event={"ID":"7aa3a255-de24-4937-8163-97efcdd0caed","Type":"ContainerDied","Data":"09c9569d2d4d31d440b86e986b89fb02be95021f58bfbdbae72f22611dc4dbab"} Dec 15 09:58:16 crc kubenswrapper[4876]: I1215 09:58:16.908852 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t246z" event={"ID":"7aa3a255-de24-4937-8163-97efcdd0caed","Type":"ContainerStarted","Data":"3ea24df51757ac18ace97c6ddcbb40fe21bf4005520179349a2c46c4d011a371"} Dec 15 09:58:17 crc kubenswrapper[4876]: I1215 09:58:17.934338 4876 generic.go:334] "Generic (PLEG): container finished" podID="7aa3a255-de24-4937-8163-97efcdd0caed" containerID="3ea24df51757ac18ace97c6ddcbb40fe21bf4005520179349a2c46c4d011a371" exitCode=0 Dec 15 09:58:17 crc kubenswrapper[4876]: I1215 09:58:17.934405 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t246z" event={"ID":"7aa3a255-de24-4937-8163-97efcdd0caed","Type":"ContainerDied","Data":"3ea24df51757ac18ace97c6ddcbb40fe21bf4005520179349a2c46c4d011a371"} Dec 15 09:58:18 crc kubenswrapper[4876]: I1215 09:58:18.949502 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t246z" event={"ID":"7aa3a255-de24-4937-8163-97efcdd0caed","Type":"ContainerStarted","Data":"0d8232b55ddc0e540d86475d5a385667c7de0b028350a637cba92084a6c16358"} Dec 15 09:58:18 crc kubenswrapper[4876]: I1215 09:58:18.972754 4876 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t246z" podStartSLOduration=3.227502433 podStartE2EDuration="11.97273356s" podCreationTimestamp="2025-12-15 09:58:07 +0000 UTC" firstStartedPulling="2025-12-15 09:58:09.826086478 +0000 UTC m=+11215.397229379" lastFinishedPulling="2025-12-15 09:58:18.571317595 +0000 UTC m=+11224.142460506" observedRunningTime="2025-12-15 09:58:18.966976155 +0000 UTC m=+11224.538119076" watchObservedRunningTime="2025-12-15 09:58:18.97273356 +0000 UTC m=+11224.543876481" Dec 15 09:58:21 crc kubenswrapper[4876]: I1215 09:58:21.705298 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:58:21 crc kubenswrapper[4876]: E1215 09:58:21.705967 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:58:27 crc kubenswrapper[4876]: I1215 09:58:27.843526 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:27 crc kubenswrapper[4876]: I1215 09:58:27.844090 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:27 crc kubenswrapper[4876]: I1215 09:58:27.895875 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:28 crc kubenswrapper[4876]: I1215 09:58:28.106610 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t246z" Dec 15 09:58:28 crc kubenswrapper[4876]: I1215 09:58:28.170744 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t246z"] Dec 15 09:58:28 crc kubenswrapper[4876]: I1215 09:58:28.273450 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:58:28 crc kubenswrapper[4876]: I1215 09:58:28.273693 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vflnr" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" containerID="cri-o://3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" gracePeriod=2 Dec 15 09:58:29 crc kubenswrapper[4876]: I1215 09:58:29.059004 4876 generic.go:334] "Generic (PLEG): container finished" podID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerID="3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" exitCode=0 Dec 15 09:58:29 crc kubenswrapper[4876]: I1215 09:58:29.059291 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerDied","Data":"3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67"} Dec 15 09:58:31 crc kubenswrapper[4876]: E1215 09:58:31.706686 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67 is running failed: container process not found" containerID="3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 09:58:31 crc kubenswrapper[4876]: E1215 09:58:31.707614 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67 is running failed: container process not found" containerID="3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 09:58:31 crc kubenswrapper[4876]: E1215 09:58:31.707970 4876 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67 is running failed: container process not found" containerID="3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" cmd=["grpc_health_probe","-addr=:50051"] Dec 15 09:58:31 crc kubenswrapper[4876]: E1215 09:58:31.708052 4876 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-vflnr" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" Dec 15 09:58:32 crc kubenswrapper[4876]: I1215 09:58:32.706256 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:58:32 crc kubenswrapper[4876]: E1215 09:58:32.706965 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:58:32 crc kubenswrapper[4876]: I1215 09:58:32.840702 4876 scope.go:117] "RemoveContainer" containerID="b8542d1c8ea43d3fb260cae590eb07f3a8fc892cdedebaff0e47498c5ab1cebc" Dec 15 09:58:39 crc kubenswrapper[4876]: I1215 09:58:39.637665 4876 scope.go:117] "RemoveContainer" containerID="bef2f82a0700da52be62f0ab90d6f02ecfa01affa6a20a631c0d179fb4a306ac" Dec 15 09:58:39 crc kubenswrapper[4876]: E1215 09:58:39.687931 4876 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:c3a837a7c939c44c9106d2b2c7c72015" Dec 15 09:58:39 crc kubenswrapper[4876]: E1215 09:58:39.687991 4876 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:c3a837a7c939c44c9106d2b2c7c72015" Dec 15 09:58:39 crc kubenswrapper[4876]: E1215 09:58:39.688202 4876 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:c3a837a7c939c44c9106d2b2c7c72015,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qzzz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(7e4c7322-1b07-4628-85ab-5f72f6f44e04): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 15 09:58:39 crc kubenswrapper[4876]: E1215 09:58:39.689923 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" Dec 15 09:58:39 crc kubenswrapper[4876]: I1215 09:58:39.746571 4876 scope.go:117] "RemoveContainer" containerID="02f1731bf581f7a793a99a5cd6e7a94628dfd2aed59ca3805d0255e35386c31c" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.126730 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.174212 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vflnr" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.174447 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vflnr" event={"ID":"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14","Type":"ContainerDied","Data":"dca7350f3c5f6e0e02eff5bf272199842467cd98c1041ad8252e7517bccc4e47"} Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.174492 4876 scope.go:117] "RemoveContainer" containerID="3da5d1536e9989afb4a00122db223bf354ba56c0f961daa44ea594b4fbd19e67" Dec 15 09:58:40 crc kubenswrapper[4876]: E1215 09:58:40.175835 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:c3a837a7c939c44c9106d2b2c7c72015\\\"\"" pod="openstack/tempest-tests-tempest" podUID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.203201 4876 scope.go:117] "RemoveContainer" containerID="2340663bbba63fda8c94e5ce74aa0415f0c98751348cac36a2dcb5bfec750cd5" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.222888 4876 scope.go:117] "RemoveContainer" containerID="dfea9cb428fc6dbbfc9233d5f7fbdeb7184285a3dfb12593a4c3158ffa936227" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.228978 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content\") pod \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.229123 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities\") pod \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.229389 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wn65\" (UniqueName: \"kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65\") pod \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\" (UID: \"a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14\") " Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.230128 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities" (OuterVolumeSpecName: "utilities") pod "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" (UID: "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.230769 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.235589 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65" (OuterVolumeSpecName: "kube-api-access-2wn65") pod "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" (UID: "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14"). InnerVolumeSpecName "kube-api-access-2wn65". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.276541 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" (UID: "a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.333303 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.333644 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wn65\" (UniqueName: \"kubernetes.io/projected/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14-kube-api-access-2wn65\") on node \"crc\" DevicePath \"\"" Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.519806 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.529673 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vflnr"] Dec 15 09:58:40 crc kubenswrapper[4876]: I1215 09:58:40.720062 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" path="/var/lib/kubelet/pods/a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14/volumes" Dec 15 09:58:47 crc kubenswrapper[4876]: I1215 09:58:47.705856 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:58:47 crc kubenswrapper[4876]: E1215 09:58:47.706828 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:58:51 crc kubenswrapper[4876]: I1215 09:58:51.907128 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 15 09:58:53 crc kubenswrapper[4876]: I1215 09:58:53.314881 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7e4c7322-1b07-4628-85ab-5f72f6f44e04","Type":"ContainerStarted","Data":"150f2f27d3e348e1b728f60d4751bc28cf7b9c8b752bf71c7fde0468b6140cfc"} Dec 15 09:58:53 crc 
kubenswrapper[4876]: I1215 09:58:53.345277 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.323437404 podStartE2EDuration="1m0.345255915s" podCreationTimestamp="2025-12-15 09:57:53 +0000 UTC" firstStartedPulling="2025-12-15 09:57:54.882582441 +0000 UTC m=+11200.453725352" lastFinishedPulling="2025-12-15 09:58:51.904400952 +0000 UTC m=+11257.475543863" observedRunningTime="2025-12-15 09:58:53.336376237 +0000 UTC m=+11258.907519148" watchObservedRunningTime="2025-12-15 09:58:53.345255915 +0000 UTC m=+11258.916398836" Dec 15 09:58:59 crc kubenswrapper[4876]: I1215 09:58:59.705135 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:58:59 crc kubenswrapper[4876]: E1215 09:58:59.705946 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:59:10 crc kubenswrapper[4876]: I1215 09:59:10.705713 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:59:10 crc kubenswrapper[4876]: E1215 09:59:10.706678 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:59:21 crc kubenswrapper[4876]: I1215 09:59:21.705893 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:59:21 crc kubenswrapper[4876]: E1215 09:59:21.706814 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:59:36 crc kubenswrapper[4876]: I1215 09:59:36.705875 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:59:36 crc kubenswrapper[4876]: E1215 09:59:36.706908 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 09:59:47 crc kubenswrapper[4876]: I1215 09:59:47.705496 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 09:59:47 crc kubenswrapper[4876]: E1215 09:59:47.706410 4876 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.164179 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz"] Dec 15 10:00:00 crc kubenswrapper[4876]: E1215 10:00:00.166146 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.166171 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" Dec 15 10:00:00 crc kubenswrapper[4876]: E1215 10:00:00.166194 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="extract-utilities" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.166203 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="extract-utilities" Dec 15 10:00:00 crc kubenswrapper[4876]: E1215 10:00:00.166240 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="extract-content" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.166246 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="extract-content" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.166479 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="a192fb4e-1fc4-4e3a-ab7a-1c7df6e71b14" containerName="registry-server" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.167281 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.169283 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.169396 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.194081 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz"] Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.232017 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7htg\" (UniqueName: \"kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.232251 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.232338 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.333644 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7htg\" (UniqueName: \"kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.333998 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.334149 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.335360 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume\") pod 
\"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.341004 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.351421 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7htg\" (UniqueName: \"kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg\") pod \"collect-profiles-29429880-c9lzz\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:00 crc kubenswrapper[4876]: I1215 10:00:00.501151 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:01 crc kubenswrapper[4876]: I1215 10:00:01.018174 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz"] Dec 15 10:00:01 crc kubenswrapper[4876]: I1215 10:00:01.083178 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" event={"ID":"e4a312f6-b06b-4ff4-9993-c1ad922c1980","Type":"ContainerStarted","Data":"b9d83055b7acc18cb590eb991d4087cd2c08504e32d81e6165b5fdce2bbb5ced"} Dec 15 10:00:02 crc kubenswrapper[4876]: I1215 10:00:02.096053 4876 generic.go:334] "Generic (PLEG): container finished" podID="e4a312f6-b06b-4ff4-9993-c1ad922c1980" containerID="d3aac22b363419ed030c193992b21244ca61674d874444987235e6167eb0307a" exitCode=0 Dec 15 10:00:02 crc kubenswrapper[4876]: I1215 10:00:02.096136 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" event={"ID":"e4a312f6-b06b-4ff4-9993-c1ad922c1980","Type":"ContainerDied","Data":"d3aac22b363419ed030c193992b21244ca61674d874444987235e6167eb0307a"} Dec 15 10:00:02 crc kubenswrapper[4876]: I1215 10:00:02.706327 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:00:02 crc kubenswrapper[4876]: E1215 10:00:02.706601 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.591538 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.714502 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7htg\" (UniqueName: \"kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg\") pod \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.714838 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume\") pod \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.715027 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume\") pod \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\" (UID: \"e4a312f6-b06b-4ff4-9993-c1ad922c1980\") " Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.715643 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume" (OuterVolumeSpecName: "config-volume") pod "e4a312f6-b06b-4ff4-9993-c1ad922c1980" (UID: "e4a312f6-b06b-4ff4-9993-c1ad922c1980"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.731544 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e4a312f6-b06b-4ff4-9993-c1ad922c1980" (UID: "e4a312f6-b06b-4ff4-9993-c1ad922c1980"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.731589 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg" (OuterVolumeSpecName: "kube-api-access-p7htg") pod "e4a312f6-b06b-4ff4-9993-c1ad922c1980" (UID: "e4a312f6-b06b-4ff4-9993-c1ad922c1980"). InnerVolumeSpecName "kube-api-access-p7htg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.818681 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7htg\" (UniqueName: \"kubernetes.io/projected/e4a312f6-b06b-4ff4-9993-c1ad922c1980-kube-api-access-p7htg\") on node \"crc\" DevicePath \"\"" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.818719 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e4a312f6-b06b-4ff4-9993-c1ad922c1980-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:00:03 crc kubenswrapper[4876]: I1215 10:00:03.818732 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e4a312f6-b06b-4ff4-9993-c1ad922c1980-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.115219 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" event={"ID":"e4a312f6-b06b-4ff4-9993-c1ad922c1980","Type":"ContainerDied","Data":"b9d83055b7acc18cb590eb991d4087cd2c08504e32d81e6165b5fdce2bbb5ced"} Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.115261 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9d83055b7acc18cb590eb991d4087cd2c08504e32d81e6165b5fdce2bbb5ced" Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.115277 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429880-c9lzz" Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.669837 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp"] Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.679086 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429835-jvsjp"] Dec 15 10:00:04 crc kubenswrapper[4876]: I1215 10:00:04.728228 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c04c1272-651f-4964-bd1b-5fae3e479f8e" path="/var/lib/kubelet/pods/c04c1272-651f-4964-bd1b-5fae3e479f8e/volumes" Dec 15 10:00:15 crc kubenswrapper[4876]: I1215 10:00:15.705897 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:00:15 crc kubenswrapper[4876]: E1215 10:00:15.706799 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:00:26 crc kubenswrapper[4876]: I1215 10:00:26.705505 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:00:26 crc kubenswrapper[4876]: E1215 10:00:26.706174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:00:39 crc kubenswrapper[4876]: I1215 10:00:39.705668 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:00:39 crc kubenswrapper[4876]: E1215 10:00:39.706453 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:00:39 crc kubenswrapper[4876]: I1215 10:00:39.844486 4876 scope.go:117] "RemoveContainer" containerID="807bb49da6c36f0b0e8e8e7c40318dc01cba9b8b839ac8ff5d8c22d7be167466" Dec 15 10:00:54 crc kubenswrapper[4876]: I1215 10:00:54.716950 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:00:54 crc kubenswrapper[4876]: E1215 10:00:54.717899 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.153381 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29429881-6jcgx"] Dec 15 10:01:00 crc kubenswrapper[4876]: E1215 10:01:00.154441 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4a312f6-b06b-4ff4-9993-c1ad922c1980" containerName="collect-profiles" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.154459 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4a312f6-b06b-4ff4-9993-c1ad922c1980" containerName="collect-profiles" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.154784 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4a312f6-b06b-4ff4-9993-c1ad922c1980" containerName="collect-profiles" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.155638 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.165709 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29429881-6jcgx"] Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.296362 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.296753 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.296807 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c27m\" (UniqueName: \"kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.296871 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.398267 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.398320 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c27m\" (UniqueName: \"kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.398372 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.398541 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.407319 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.407905 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.410005 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.419347 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c27m\" (UniqueName: \"kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m\") pod \"keystone-cron-29429881-6jcgx\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.499331 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:00 crc kubenswrapper[4876]: I1215 10:01:00.983605 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29429881-6jcgx"] Dec 15 10:01:00 crc kubenswrapper[4876]: W1215 10:01:00.988319 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod866c0912_2922_4832_8398_4953a10b5661.slice/crio-eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c WatchSource:0}: Error finding container eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c: Status 404 returned error can't find the container with id eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c Dec 15 10:01:01 crc kubenswrapper[4876]: I1215 10:01:01.694412 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429881-6jcgx" event={"ID":"866c0912-2922-4832-8398-4953a10b5661","Type":"ContainerStarted","Data":"11e3655e833419645f06947c7ee60377996be2b94597c2f6f4ffc100d83bb063"} Dec 15 10:01:01 crc kubenswrapper[4876]: I1215 10:01:01.694957 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429881-6jcgx" event={"ID":"866c0912-2922-4832-8398-4953a10b5661","Type":"ContainerStarted","Data":"eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c"} Dec 15 10:01:01 crc kubenswrapper[4876]: I1215 10:01:01.716811 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29429881-6jcgx" podStartSLOduration=1.716794454 podStartE2EDuration="1.716794454s" podCreationTimestamp="2025-12-15 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-15 10:01:01.708233175 +0000 UTC m=+11387.279376086" watchObservedRunningTime="2025-12-15 10:01:01.716794454 +0000 UTC m=+11387.287937365" Dec 15 10:01:04 crc kubenswrapper[4876]: I1215 10:01:04.752933 4876 
generic.go:334] "Generic (PLEG): container finished" podID="866c0912-2922-4832-8398-4953a10b5661" containerID="11e3655e833419645f06947c7ee60377996be2b94597c2f6f4ffc100d83bb063" exitCode=0 Dec 15 10:01:04 crc kubenswrapper[4876]: I1215 10:01:04.753020 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429881-6jcgx" event={"ID":"866c0912-2922-4832-8398-4953a10b5661","Type":"ContainerDied","Data":"11e3655e833419645f06947c7ee60377996be2b94597c2f6f4ffc100d83bb063"} Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.328626 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.432650 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c27m\" (UniqueName: \"kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m\") pod \"866c0912-2922-4832-8398-4953a10b5661\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.432706 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data\") pod \"866c0912-2922-4832-8398-4953a10b5661\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.432829 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle\") pod \"866c0912-2922-4832-8398-4953a10b5661\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.432930 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys\") pod \"866c0912-2922-4832-8398-4953a10b5661\" (UID: \"866c0912-2922-4832-8398-4953a10b5661\") " Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.438828 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "866c0912-2922-4832-8398-4953a10b5661" (UID: "866c0912-2922-4832-8398-4953a10b5661"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.438853 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m" (OuterVolumeSpecName: "kube-api-access-2c27m") pod "866c0912-2922-4832-8398-4953a10b5661" (UID: "866c0912-2922-4832-8398-4953a10b5661"). InnerVolumeSpecName "kube-api-access-2c27m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.476289 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "866c0912-2922-4832-8398-4953a10b5661" (UID: "866c0912-2922-4832-8398-4953a10b5661"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.507543 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data" (OuterVolumeSpecName: "config-data") pod "866c0912-2922-4832-8398-4953a10b5661" (UID: "866c0912-2922-4832-8398-4953a10b5661"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.535283 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c27m\" (UniqueName: \"kubernetes.io/projected/866c0912-2922-4832-8398-4953a10b5661-kube-api-access-2c27m\") on node \"crc\" DevicePath \"\"" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.535319 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.535330 4876 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.535339 4876 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/866c0912-2922-4832-8398-4953a10b5661-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.705951 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:01:06 crc kubenswrapper[4876]: E1215 10:01:06.706547 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.813770 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29429881-6jcgx" event={"ID":"866c0912-2922-4832-8398-4953a10b5661","Type":"ContainerDied","Data":"eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c"} Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.813810 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eda866e5a1a4eab6a8f84658b293cca291821851329c6e72dcff9e6da9505c8c" Dec 15 10:01:06 crc kubenswrapper[4876]: I1215 10:01:06.813861 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29429881-6jcgx" Dec 15 10:01:21 crc kubenswrapper[4876]: I1215 10:01:21.705690 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:01:21 crc kubenswrapper[4876]: E1215 10:01:21.706474 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:01:35 crc kubenswrapper[4876]: I1215 10:01:35.707086 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:01:35 crc kubenswrapper[4876]: E1215 10:01:35.707813 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:01:50 crc kubenswrapper[4876]: I1215 10:01:50.705740 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:01:50 crc kubenswrapper[4876]: E1215 10:01:50.706629 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.708080 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:02:02 crc kubenswrapper[4876]: E1215 10:02:02.708857 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.882920 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:02 crc kubenswrapper[4876]: E1215 10:02:02.883620 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866c0912-2922-4832-8398-4953a10b5661" containerName="keystone-cron" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.883647 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="866c0912-2922-4832-8398-4953a10b5661" containerName="keystone-cron" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.884222 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="866c0912-2922-4832-8398-4953a10b5661" containerName="keystone-cron" Dec 15 10:02:02 crc 
kubenswrapper[4876]: I1215 10:02:02.886345 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.921565 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.978150 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.978394 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qx28\" (UniqueName: \"kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:02 crc kubenswrapper[4876]: I1215 10:02:02.978512 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.080043 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.080214 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qx28\" (UniqueName: \"kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.080281 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.080609 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.080834 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 
15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.112610 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qx28\" (UniqueName: \"kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28\") pod \"community-operators-pjlcm\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.206748 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:03 crc kubenswrapper[4876]: I1215 10:02:03.797622 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:04 crc kubenswrapper[4876]: I1215 10:02:04.392278 4876 generic.go:334] "Generic (PLEG): container finished" podID="af4347b6-7061-42ee-b536-a13ef19519b8" containerID="9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356" exitCode=0 Dec 15 10:02:04 crc kubenswrapper[4876]: I1215 10:02:04.392381 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerDied","Data":"9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356"} Dec 15 10:02:04 crc kubenswrapper[4876]: I1215 10:02:04.392600 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerStarted","Data":"89fcb3a162b1b839dddb2f9a558e1a085f5f3b67a367eae1b60635cf5b74c20c"} Dec 15 10:02:04 crc kubenswrapper[4876]: I1215 10:02:04.394737 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 10:02:05 crc kubenswrapper[4876]: I1215 10:02:05.405552 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerStarted","Data":"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb"} Dec 15 10:02:06 crc kubenswrapper[4876]: I1215 10:02:06.417954 4876 generic.go:334] "Generic (PLEG): container finished" podID="af4347b6-7061-42ee-b536-a13ef19519b8" containerID="3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb" exitCode=0 Dec 15 10:02:06 crc kubenswrapper[4876]: I1215 10:02:06.417996 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerDied","Data":"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb"} Dec 15 10:02:08 crc kubenswrapper[4876]: I1215 10:02:08.441351 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerStarted","Data":"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9"} Dec 15 10:02:08 crc kubenswrapper[4876]: I1215 10:02:08.466422 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pjlcm" podStartSLOduration=2.962830033 podStartE2EDuration="6.466388058s" podCreationTimestamp="2025-12-15 10:02:02 +0000 UTC" firstStartedPulling="2025-12-15 10:02:04.394454875 +0000 UTC m=+11449.965597786" lastFinishedPulling="2025-12-15 10:02:07.8980129 +0000 UTC m=+11453.469155811" observedRunningTime="2025-12-15 
10:02:08.460758687 +0000 UTC m=+11454.031901598" watchObservedRunningTime="2025-12-15 10:02:08.466388058 +0000 UTC m=+11454.037530969" Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.207669 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.209433 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.266134 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.563378 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.610204 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:13 crc kubenswrapper[4876]: I1215 10:02:13.706124 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:02:13 crc kubenswrapper[4876]: E1215 10:02:13.706361 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:02:15 crc kubenswrapper[4876]: I1215 10:02:15.525819 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pjlcm" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="registry-server" containerID="cri-o://5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9" gracePeriod=2 Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.225507 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.385410 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qx28\" (UniqueName: \"kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28\") pod \"af4347b6-7061-42ee-b536-a13ef19519b8\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.385517 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content\") pod \"af4347b6-7061-42ee-b536-a13ef19519b8\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.385576 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities\") pod \"af4347b6-7061-42ee-b536-a13ef19519b8\" (UID: \"af4347b6-7061-42ee-b536-a13ef19519b8\") " Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.386536 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities" (OuterVolumeSpecName: "utilities") pod "af4347b6-7061-42ee-b536-a13ef19519b8" (UID: "af4347b6-7061-42ee-b536-a13ef19519b8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.401716 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28" (OuterVolumeSpecName: "kube-api-access-7qx28") pod "af4347b6-7061-42ee-b536-a13ef19519b8" (UID: "af4347b6-7061-42ee-b536-a13ef19519b8"). InnerVolumeSpecName "kube-api-access-7qx28". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.448023 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af4347b6-7061-42ee-b536-a13ef19519b8" (UID: "af4347b6-7061-42ee-b536-a13ef19519b8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.488465 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.488521 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af4347b6-7061-42ee-b536-a13ef19519b8-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.488537 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qx28\" (UniqueName: \"kubernetes.io/projected/af4347b6-7061-42ee-b536-a13ef19519b8-kube-api-access-7qx28\") on node \"crc\" DevicePath \"\"" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.535811 4876 generic.go:334] "Generic (PLEG): container finished" podID="af4347b6-7061-42ee-b536-a13ef19519b8" containerID="5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9" exitCode=0 Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.535857 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerDied","Data":"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9"} Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.535884 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pjlcm" event={"ID":"af4347b6-7061-42ee-b536-a13ef19519b8","Type":"ContainerDied","Data":"89fcb3a162b1b839dddb2f9a558e1a085f5f3b67a367eae1b60635cf5b74c20c"} Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.535901 4876 scope.go:117] "RemoveContainer" containerID="5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.536035 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pjlcm" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.570434 4876 scope.go:117] "RemoveContainer" containerID="3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.586441 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.596033 4876 scope.go:117] "RemoveContainer" containerID="9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.598290 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pjlcm"] Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.637123 4876 scope.go:117] "RemoveContainer" containerID="5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9" Dec 15 10:02:16 crc kubenswrapper[4876]: E1215 10:02:16.637621 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9\": container with ID starting with 5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9 not found: ID does not exist" containerID="5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.637662 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9"} err="failed to get container status \"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9\": rpc error: code = NotFound desc = could not find container \"5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9\": container with ID starting with 5ef8a7cf7f060a467c9a84d8e7e794ce25d0f991f7f3fc4f2beb53bf498d93f9 not found: ID does not exist" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.637689 4876 scope.go:117] "RemoveContainer" containerID="3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb" Dec 15 10:02:16 crc kubenswrapper[4876]: E1215 10:02:16.638039 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb\": container with ID starting with 3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb not found: ID does not exist" containerID="3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.638095 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb"} err="failed to get container status \"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb\": rpc error: code = NotFound desc = could not find container \"3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb\": container with ID starting with 3d0eecaa0ada95a40af3021d842db2ea9c180e2ba3c88c388efaa65064407deb not found: ID does not exist" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.638215 4876 scope.go:117] "RemoveContainer" containerID="9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356" Dec 15 10:02:16 crc kubenswrapper[4876]: E1215 10:02:16.638721 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356\": container with ID starting with 9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356 not found: ID does not exist" containerID="9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.638756 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356"} err="failed to get container status \"9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356\": rpc error: code = NotFound desc = could not find container \"9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356\": container with ID starting with 9b3392fc020fab6e7f345dbdf21e6ef026e45a0015c475943c88f823d2c62356 not found: ID does not exist" Dec 15 10:02:16 crc kubenswrapper[4876]: I1215 10:02:16.718097 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" path="/var/lib/kubelet/pods/af4347b6-7061-42ee-b536-a13ef19519b8/volumes" Dec 15 10:02:26 crc kubenswrapper[4876]: I1215 10:02:26.705763 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:02:26 crc kubenswrapper[4876]: E1215 10:02:26.706474 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:02:41 crc kubenswrapper[4876]: I1215 10:02:41.706064 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:02:41 crc kubenswrapper[4876]: E1215 10:02:41.706825 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:02:53 crc kubenswrapper[4876]: I1215 10:02:53.705611 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:02:53 crc kubenswrapper[4876]: E1215 10:02:53.706621 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:03:06 crc kubenswrapper[4876]: I1215 10:03:06.707016 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:03:07 crc kubenswrapper[4876]: I1215 10:03:07.083141 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df"} Dec 15 10:05:27 crc kubenswrapper[4876]: I1215 10:05:27.323054 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:05:27 crc kubenswrapper[4876]: I1215 10:05:27.324735 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.562307 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:30 crc kubenswrapper[4876]: E1215 10:05:30.563472 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="registry-server" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.563498 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="registry-server" Dec 15 10:05:30 crc kubenswrapper[4876]: E1215 10:05:30.563543 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="extract-utilities" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.563552 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="extract-utilities" Dec 15 10:05:30 crc kubenswrapper[4876]: E1215 10:05:30.563571 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="extract-content" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.563583 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="extract-content" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.563856 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="af4347b6-7061-42ee-b536-a13ef19519b8" containerName="registry-server" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.565799 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.663445 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.679782 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.679849 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhp4n\" (UniqueName: \"kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.680036 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.783290 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.783329 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhp4n\" (UniqueName: \"kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.783507 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.783886 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.784120 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.806953 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jhp4n\" (UniqueName: \"kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n\") pod \"redhat-operators-tmfxn\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:30 crc kubenswrapper[4876]: I1215 10:05:30.912758 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:31 crc kubenswrapper[4876]: I1215 10:05:31.667306 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:31 crc kubenswrapper[4876]: I1215 10:05:31.801981 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerStarted","Data":"2c71419c59d7cb58635779e3f19917c99f9ea5f703162e7bd35a5da6a8da0a92"} Dec 15 10:05:32 crc kubenswrapper[4876]: I1215 10:05:32.812403 4876 generic.go:334] "Generic (PLEG): container finished" podID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerID="31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda" exitCode=0 Dec 15 10:05:32 crc kubenswrapper[4876]: I1215 10:05:32.812454 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerDied","Data":"31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda"} Dec 15 10:05:33 crc kubenswrapper[4876]: I1215 10:05:33.822442 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerStarted","Data":"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12"} Dec 15 10:05:36 crc kubenswrapper[4876]: I1215 10:05:36.859838 4876 generic.go:334] "Generic (PLEG): container finished" podID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerID="9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12" exitCode=0 Dec 15 10:05:36 crc kubenswrapper[4876]: I1215 10:05:36.859922 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerDied","Data":"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12"} Dec 15 10:05:37 crc kubenswrapper[4876]: I1215 10:05:37.874991 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerStarted","Data":"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5"} Dec 15 10:05:40 crc kubenswrapper[4876]: I1215 10:05:40.913756 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:40 crc kubenswrapper[4876]: I1215 10:05:40.915726 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:41 crc kubenswrapper[4876]: I1215 10:05:41.973300 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tmfxn" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="registry-server" probeResult="failure" output=< Dec 15 10:05:41 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 10:05:41 crc kubenswrapper[4876]: > Dec 15 10:05:50 crc 
kubenswrapper[4876]: I1215 10:05:50.981150 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:51 crc kubenswrapper[4876]: I1215 10:05:51.018822 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tmfxn" podStartSLOduration=16.509245356 podStartE2EDuration="21.018796987s" podCreationTimestamp="2025-12-15 10:05:30 +0000 UTC" firstStartedPulling="2025-12-15 10:05:32.814255652 +0000 UTC m=+11658.385398563" lastFinishedPulling="2025-12-15 10:05:37.323807293 +0000 UTC m=+11662.894950194" observedRunningTime="2025-12-15 10:05:37.906024306 +0000 UTC m=+11663.477167217" watchObservedRunningTime="2025-12-15 10:05:51.018796987 +0000 UTC m=+11676.589939898" Dec 15 10:05:51 crc kubenswrapper[4876]: I1215 10:05:51.031028 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:51 crc kubenswrapper[4876]: I1215 10:05:51.222687 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:52 crc kubenswrapper[4876]: I1215 10:05:52.011275 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tmfxn" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="registry-server" containerID="cri-o://9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5" gracePeriod=2 Dec 15 10:05:52 crc kubenswrapper[4876]: I1215 10:05:52.909901 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.003979 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content\") pod \"6af721db-0bfd-4433-9a1f-da3756bcc20b\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.004507 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities\") pod \"6af721db-0bfd-4433-9a1f-da3756bcc20b\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.004639 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhp4n\" (UniqueName: \"kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n\") pod \"6af721db-0bfd-4433-9a1f-da3756bcc20b\" (UID: \"6af721db-0bfd-4433-9a1f-da3756bcc20b\") " Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.005377 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities" (OuterVolumeSpecName: "utilities") pod "6af721db-0bfd-4433-9a1f-da3756bcc20b" (UID: "6af721db-0bfd-4433-9a1f-da3756bcc20b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.020610 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n" (OuterVolumeSpecName: "kube-api-access-jhp4n") pod "6af721db-0bfd-4433-9a1f-da3756bcc20b" (UID: "6af721db-0bfd-4433-9a1f-da3756bcc20b"). InnerVolumeSpecName "kube-api-access-jhp4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.035957 4876 generic.go:334] "Generic (PLEG): container finished" podID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerID="9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5" exitCode=0 Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.036014 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerDied","Data":"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5"} Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.036065 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tmfxn" event={"ID":"6af721db-0bfd-4433-9a1f-da3756bcc20b","Type":"ContainerDied","Data":"2c71419c59d7cb58635779e3f19917c99f9ea5f703162e7bd35a5da6a8da0a92"} Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.036087 4876 scope.go:117] "RemoveContainer" containerID="9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.036091 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tmfxn" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.072561 4876 scope.go:117] "RemoveContainer" containerID="9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.099338 4876 scope.go:117] "RemoveContainer" containerID="31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.106926 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhp4n\" (UniqueName: \"kubernetes.io/projected/6af721db-0bfd-4433-9a1f-da3756bcc20b-kube-api-access-jhp4n\") on node \"crc\" DevicePath \"\"" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.106953 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.151900 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6af721db-0bfd-4433-9a1f-da3756bcc20b" (UID: "6af721db-0bfd-4433-9a1f-da3756bcc20b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.159737 4876 scope.go:117] "RemoveContainer" containerID="9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5" Dec 15 10:05:53 crc kubenswrapper[4876]: E1215 10:05:53.160289 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5\": container with ID starting with 9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5 not found: ID does not exist" containerID="9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.160348 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5"} err="failed to get container status \"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5\": rpc error: code = NotFound desc = could not find container \"9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5\": container with ID starting with 9c9957006dbff264a3d8670aeb9530cb0d8f20b998877ec15d5054f1173762d5 not found: ID does not exist" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.160387 4876 scope.go:117] "RemoveContainer" containerID="9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12" Dec 15 10:05:53 crc kubenswrapper[4876]: E1215 10:05:53.160830 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12\": container with ID starting with 9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12 not found: ID does not exist" containerID="9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.160857 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12"} err="failed to get container status \"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12\": rpc error: code = NotFound desc = could not find container \"9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12\": container with ID starting with 9a4b5745421ba33257df3334de074a2514e2c08ba56b463350a4107801005d12 not found: ID does not exist" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.160876 4876 scope.go:117] "RemoveContainer" containerID="31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda" Dec 15 10:05:53 crc kubenswrapper[4876]: E1215 10:05:53.161319 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda\": container with ID starting with 31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda not found: ID does not exist" containerID="31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.161343 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda"} err="failed to get container status \"31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda\": rpc error: code = NotFound desc = could not 
find container \"31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda\": container with ID starting with 31eea11c8124f85a2333ed47f59d9ad236a5df291ba6388740679a1aa82c4dda not found: ID does not exist" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.208802 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6af721db-0bfd-4433-9a1f-da3756bcc20b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.370242 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:53 crc kubenswrapper[4876]: I1215 10:05:53.379885 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tmfxn"] Dec 15 10:05:54 crc kubenswrapper[4876]: I1215 10:05:54.718342 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" path="/var/lib/kubelet/pods/6af721db-0bfd-4433-9a1f-da3756bcc20b/volumes" Dec 15 10:05:57 crc kubenswrapper[4876]: I1215 10:05:57.322309 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:05:57 crc kubenswrapper[4876]: I1215 10:05:57.322911 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:06:27 crc kubenswrapper[4876]: I1215 10:06:27.322970 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:06:27 crc kubenswrapper[4876]: I1215 10:06:27.323585 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:06:27 crc kubenswrapper[4876]: I1215 10:06:27.323642 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:06:27 crc kubenswrapper[4876]: I1215 10:06:27.324502 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:06:27 crc kubenswrapper[4876]: I1215 10:06:27.324561 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" 
containerID="cri-o://b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df" gracePeriod=600 Dec 15 10:06:28 crc kubenswrapper[4876]: I1215 10:06:28.385171 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df" exitCode=0 Dec 15 10:06:28 crc kubenswrapper[4876]: I1215 10:06:28.385247 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df"} Dec 15 10:06:28 crc kubenswrapper[4876]: I1215 10:06:28.385780 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df"} Dec 15 10:06:28 crc kubenswrapper[4876]: I1215 10:06:28.385802 4876 scope.go:117] "RemoveContainer" containerID="da4c7e650fc13668280f27cde0cea7e4a3f52b82971484d6a6982e8275062d28" Dec 15 10:08:27 crc kubenswrapper[4876]: I1215 10:08:27.322426 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:08:27 crc kubenswrapper[4876]: I1215 10:08:27.323665 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:08:57 crc kubenswrapper[4876]: I1215 10:08:57.323034 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:08:57 crc kubenswrapper[4876]: I1215 10:08:57.323502 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:09:27 crc kubenswrapper[4876]: I1215 10:09:27.323472 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:09:27 crc kubenswrapper[4876]: I1215 10:09:27.325237 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:09:27 crc kubenswrapper[4876]: I1215 10:09:27.325381 4876 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:09:27 crc kubenswrapper[4876]: I1215 10:09:27.326244 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:09:27 crc kubenswrapper[4876]: I1215 10:09:27.326406 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" gracePeriod=600 Dec 15 10:09:27 crc kubenswrapper[4876]: E1215 10:09:27.466358 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:09:28 crc kubenswrapper[4876]: I1215 10:09:28.337531 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" exitCode=0 Dec 15 10:09:28 crc kubenswrapper[4876]: I1215 10:09:28.337606 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df"} Dec 15 10:09:28 crc kubenswrapper[4876]: I1215 10:09:28.337853 4876 scope.go:117] "RemoveContainer" containerID="b5d396a91bd8b093bbc76d88958c18367efd4df6f555db8f24fe0a5b15db35df" Dec 15 10:09:28 crc kubenswrapper[4876]: I1215 10:09:28.338707 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:09:28 crc kubenswrapper[4876]: E1215 10:09:28.339202 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:09:41 crc kubenswrapper[4876]: I1215 10:09:41.705843 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:09:41 crc kubenswrapper[4876]: E1215 10:09:41.706780 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 
10:09:55 crc kubenswrapper[4876]: I1215 10:09:55.705444 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:09:55 crc kubenswrapper[4876]: E1215 10:09:55.706288 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:10:07 crc kubenswrapper[4876]: I1215 10:10:07.705613 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:10:07 crc kubenswrapper[4876]: E1215 10:10:07.706568 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.862868 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:12 crc kubenswrapper[4876]: E1215 10:10:12.863788 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="extract-content" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.863810 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="extract-content" Dec 15 10:10:12 crc kubenswrapper[4876]: E1215 10:10:12.863842 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="registry-server" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.863848 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="registry-server" Dec 15 10:10:12 crc kubenswrapper[4876]: E1215 10:10:12.863876 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="extract-utilities" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.863884 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="extract-utilities" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.866125 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6af721db-0bfd-4433-9a1f-da3756bcc20b" containerName="registry-server" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.868221 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.889352 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.980502 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kqlb\" (UniqueName: \"kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.980682 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:12 crc kubenswrapper[4876]: I1215 10:10:12.981063 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.083044 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.083140 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kqlb\" (UniqueName: \"kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.083201 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.083624 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.083698 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.104055 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4kqlb\" (UniqueName: \"kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb\") pod \"redhat-marketplace-ddwws\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.191869 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.769891 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:13 crc kubenswrapper[4876]: I1215 10:10:13.890238 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerStarted","Data":"2e125a2e5cd07fea5a916a714677be49263b443f4711e91b849bd996332a7ffe"} Dec 15 10:10:14 crc kubenswrapper[4876]: I1215 10:10:14.902421 4876 generic.go:334] "Generic (PLEG): container finished" podID="370b12de-85af-4325-9f6e-b50fc27bc494" containerID="bcb88a2e13c7c9a8970180e55b184d693faeb290296b4b68a9dbb18f4b6f9a9c" exitCode=0 Dec 15 10:10:14 crc kubenswrapper[4876]: I1215 10:10:14.902466 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerDied","Data":"bcb88a2e13c7c9a8970180e55b184d693faeb290296b4b68a9dbb18f4b6f9a9c"} Dec 15 10:10:14 crc kubenswrapper[4876]: I1215 10:10:14.905334 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 10:10:16 crc kubenswrapper[4876]: I1215 10:10:16.923058 4876 generic.go:334] "Generic (PLEG): container finished" podID="370b12de-85af-4325-9f6e-b50fc27bc494" containerID="cef81a4f7c404ceb73024200aa7cfd086daf1ff9fb24079b6dd8f4902c588965" exitCode=0 Dec 15 10:10:16 crc kubenswrapper[4876]: I1215 10:10:16.923153 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerDied","Data":"cef81a4f7c404ceb73024200aa7cfd086daf1ff9fb24079b6dd8f4902c588965"} Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.271220 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.274502 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.286134 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.392000 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.392072 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpnfh\" (UniqueName: \"kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.392171 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.494392 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.494540 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.494595 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpnfh\" (UniqueName: \"kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.495261 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.495326 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.524870 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fpnfh\" (UniqueName: \"kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh\") pod \"certified-operators-4t462\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.610057 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:17 crc kubenswrapper[4876]: I1215 10:10:17.986602 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerStarted","Data":"17e5cd90b132904d57267b5c2469fd445a470c6d5abbc00b3dd2104c523a407e"} Dec 15 10:10:18 crc kubenswrapper[4876]: I1215 10:10:18.035495 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ddwws" podStartSLOduration=3.5198402570000002 podStartE2EDuration="6.035470884s" podCreationTimestamp="2025-12-15 10:10:12 +0000 UTC" firstStartedPulling="2025-12-15 10:10:14.905060904 +0000 UTC m=+11940.476203815" lastFinishedPulling="2025-12-15 10:10:17.420691531 +0000 UTC m=+11942.991834442" observedRunningTime="2025-12-15 10:10:18.031746775 +0000 UTC m=+11943.602889686" watchObservedRunningTime="2025-12-15 10:10:18.035470884 +0000 UTC m=+11943.606613795" Dec 15 10:10:18 crc kubenswrapper[4876]: I1215 10:10:18.268669 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:18 crc kubenswrapper[4876]: I1215 10:10:18.997874 4876 generic.go:334] "Generic (PLEG): container finished" podID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerID="e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab" exitCode=0 Dec 15 10:10:18 crc kubenswrapper[4876]: I1215 10:10:18.997986 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerDied","Data":"e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab"} Dec 15 10:10:18 crc kubenswrapper[4876]: I1215 10:10:18.998279 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerStarted","Data":"b7920beec69a396327c737d1faea026dbdcbf2f4233f3827fd00e5f8f0c15552"} Dec 15 10:10:20 crc kubenswrapper[4876]: I1215 10:10:20.008770 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerStarted","Data":"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1"} Dec 15 10:10:20 crc kubenswrapper[4876]: I1215 10:10:20.705492 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:10:20 crc kubenswrapper[4876]: E1215 10:10:20.705994 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:10:21 
crc kubenswrapper[4876]: I1215 10:10:21.030710 4876 generic.go:334] "Generic (PLEG): container finished" podID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerID="e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1" exitCode=0 Dec 15 10:10:21 crc kubenswrapper[4876]: I1215 10:10:21.030763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerDied","Data":"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1"} Dec 15 10:10:22 crc kubenswrapper[4876]: I1215 10:10:22.042616 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerStarted","Data":"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55"} Dec 15 10:10:22 crc kubenswrapper[4876]: I1215 10:10:22.069897 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4t462" podStartSLOduration=2.365674156 podStartE2EDuration="5.069879309s" podCreationTimestamp="2025-12-15 10:10:17 +0000 UTC" firstStartedPulling="2025-12-15 10:10:19.000642886 +0000 UTC m=+11944.571785817" lastFinishedPulling="2025-12-15 10:10:21.704848059 +0000 UTC m=+11947.275990970" observedRunningTime="2025-12-15 10:10:22.060832056 +0000 UTC m=+11947.631974967" watchObservedRunningTime="2025-12-15 10:10:22.069879309 +0000 UTC m=+11947.641022220" Dec 15 10:10:23 crc kubenswrapper[4876]: I1215 10:10:23.193465 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:23 crc kubenswrapper[4876]: I1215 10:10:23.193865 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:23 crc kubenswrapper[4876]: I1215 10:10:23.245399 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:24 crc kubenswrapper[4876]: I1215 10:10:24.146708 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:27 crc kubenswrapper[4876]: I1215 10:10:27.610528 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:27 crc kubenswrapper[4876]: I1215 10:10:27.610884 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:27 crc kubenswrapper[4876]: I1215 10:10:27.662024 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:27 crc kubenswrapper[4876]: I1215 10:10:27.671911 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:27 crc kubenswrapper[4876]: I1215 10:10:27.672279 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ddwws" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="registry-server" containerID="cri-o://17e5cd90b132904d57267b5c2469fd445a470c6d5abbc00b3dd2104c523a407e" gracePeriod=2 Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.107010 4876 generic.go:334] "Generic (PLEG): container finished" 
podID="370b12de-85af-4325-9f6e-b50fc27bc494" containerID="17e5cd90b132904d57267b5c2469fd445a470c6d5abbc00b3dd2104c523a407e" exitCode=0 Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.107065 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerDied","Data":"17e5cd90b132904d57267b5c2469fd445a470c6d5abbc00b3dd2104c523a407e"} Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.161693 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.263459 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.342536 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content\") pod \"370b12de-85af-4325-9f6e-b50fc27bc494\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.342640 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities\") pod \"370b12de-85af-4325-9f6e-b50fc27bc494\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.342724 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kqlb\" (UniqueName: \"kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb\") pod \"370b12de-85af-4325-9f6e-b50fc27bc494\" (UID: \"370b12de-85af-4325-9f6e-b50fc27bc494\") " Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.343373 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities" (OuterVolumeSpecName: "utilities") pod "370b12de-85af-4325-9f6e-b50fc27bc494" (UID: "370b12de-85af-4325-9f6e-b50fc27bc494"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.358085 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb" (OuterVolumeSpecName: "kube-api-access-4kqlb") pod "370b12de-85af-4325-9f6e-b50fc27bc494" (UID: "370b12de-85af-4325-9f6e-b50fc27bc494"). InnerVolumeSpecName "kube-api-access-4kqlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.366760 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "370b12de-85af-4325-9f6e-b50fc27bc494" (UID: "370b12de-85af-4325-9f6e-b50fc27bc494"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.445772 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.445807 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/370b12de-85af-4325-9f6e-b50fc27bc494-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:28 crc kubenswrapper[4876]: I1215 10:10:28.445820 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kqlb\" (UniqueName: \"kubernetes.io/projected/370b12de-85af-4325-9f6e-b50fc27bc494-kube-api-access-4kqlb\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.120545 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ddwws" Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.120833 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ddwws" event={"ID":"370b12de-85af-4325-9f6e-b50fc27bc494","Type":"ContainerDied","Data":"2e125a2e5cd07fea5a916a714677be49263b443f4711e91b849bd996332a7ffe"} Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.121027 4876 scope.go:117] "RemoveContainer" containerID="17e5cd90b132904d57267b5c2469fd445a470c6d5abbc00b3dd2104c523a407e" Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.148308 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.154484 4876 scope.go:117] "RemoveContainer" containerID="cef81a4f7c404ceb73024200aa7cfd086daf1ff9fb24079b6dd8f4902c588965" Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.163187 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ddwws"] Dec 15 10:10:29 crc kubenswrapper[4876]: I1215 10:10:29.202334 4876 scope.go:117] "RemoveContainer" containerID="bcb88a2e13c7c9a8970180e55b184d693faeb290296b4b68a9dbb18f4b6f9a9c" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.057351 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.138778 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4t462" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="registry-server" containerID="cri-o://d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55" gracePeriod=2 Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.676227 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.718001 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" path="/var/lib/kubelet/pods/370b12de-85af-4325-9f6e-b50fc27bc494/volumes" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.793987 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpnfh\" (UniqueName: \"kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh\") pod \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.794177 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities\") pod \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.794256 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content\") pod \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\" (UID: \"efc53a06-3340-49bf-a0ff-68d2fe3aabc7\") " Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.796032 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities" (OuterVolumeSpecName: "utilities") pod "efc53a06-3340-49bf-a0ff-68d2fe3aabc7" (UID: "efc53a06-3340-49bf-a0ff-68d2fe3aabc7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.811348 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh" (OuterVolumeSpecName: "kube-api-access-fpnfh") pod "efc53a06-3340-49bf-a0ff-68d2fe3aabc7" (UID: "efc53a06-3340-49bf-a0ff-68d2fe3aabc7"). InnerVolumeSpecName "kube-api-access-fpnfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.860193 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "efc53a06-3340-49bf-a0ff-68d2fe3aabc7" (UID: "efc53a06-3340-49bf-a0ff-68d2fe3aabc7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.896280 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.896308 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:30 crc kubenswrapper[4876]: I1215 10:10:30.896318 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpnfh\" (UniqueName: \"kubernetes.io/projected/efc53a06-3340-49bf-a0ff-68d2fe3aabc7-kube-api-access-fpnfh\") on node \"crc\" DevicePath \"\"" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.149737 4876 generic.go:334] "Generic (PLEG): container finished" podID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerID="d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55" exitCode=0 Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.149787 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerDied","Data":"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55"} Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.149821 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4t462" event={"ID":"efc53a06-3340-49bf-a0ff-68d2fe3aabc7","Type":"ContainerDied","Data":"b7920beec69a396327c737d1faea026dbdcbf2f4233f3827fd00e5f8f0c15552"} Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.149852 4876 scope.go:117] "RemoveContainer" containerID="d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.150003 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4t462" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.184911 4876 scope.go:117] "RemoveContainer" containerID="e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.187515 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.201818 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4t462"] Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.214065 4876 scope.go:117] "RemoveContainer" containerID="e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.268731 4876 scope.go:117] "RemoveContainer" containerID="d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55" Dec 15 10:10:31 crc kubenswrapper[4876]: E1215 10:10:31.269423 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55\": container with ID starting with d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55 not found: ID does not exist" containerID="d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.269452 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55"} err="failed to get container status \"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55\": rpc error: code = NotFound desc = could not find container \"d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55\": container with ID starting with d4c88f9222ad75b668c78201fe82d3006744860a62666a8de341bf80bd11ba55 not found: ID does not exist" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.269473 4876 scope.go:117] "RemoveContainer" containerID="e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1" Dec 15 10:10:31 crc kubenswrapper[4876]: E1215 10:10:31.270217 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1\": container with ID starting with e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1 not found: ID does not exist" containerID="e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.270243 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1"} err="failed to get container status \"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1\": rpc error: code = NotFound desc = could not find container \"e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1\": container with ID starting with e08a081c10ca009374d4cdafe80b14622e8256d8f02e45262dacb9e52b4d07e1 not found: ID does not exist" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.270257 4876 scope.go:117] "RemoveContainer" containerID="e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab" Dec 15 10:10:31 crc kubenswrapper[4876]: E1215 10:10:31.270867 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab\": container with ID starting with e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab not found: ID does not exist" containerID="e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab" Dec 15 10:10:31 crc kubenswrapper[4876]: I1215 10:10:31.270905 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab"} err="failed to get container status \"e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab\": rpc error: code = NotFound desc = could not find container \"e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab\": container with ID starting with e3bfce510c099e291a028265ad1ad5ed36b5c6e7e0681269eabe6b46eb112fab not found: ID does not exist" Dec 15 10:10:32 crc kubenswrapper[4876]: I1215 10:10:32.748440 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" path="/var/lib/kubelet/pods/efc53a06-3340-49bf-a0ff-68d2fe3aabc7/volumes" Dec 15 10:10:35 crc kubenswrapper[4876]: I1215 10:10:35.706705 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:10:35 crc kubenswrapper[4876]: E1215 10:10:35.707598 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:10:49 crc kubenswrapper[4876]: I1215 10:10:49.705767 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:10:49 crc kubenswrapper[4876]: E1215 10:10:49.706620 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:11:00 crc kubenswrapper[4876]: I1215 10:11:00.706011 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:11:00 crc kubenswrapper[4876]: E1215 10:11:00.706830 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:11:15 crc kubenswrapper[4876]: I1215 10:11:15.705899 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:11:15 crc kubenswrapper[4876]: E1215 10:11:15.706782 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:11:29 crc kubenswrapper[4876]: I1215 10:11:29.706579 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:11:29 crc kubenswrapper[4876]: E1215 10:11:29.707850 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:11:41 crc kubenswrapper[4876]: I1215 10:11:41.706286 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:11:41 crc kubenswrapper[4876]: E1215 10:11:41.707191 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:11:53 crc kubenswrapper[4876]: I1215 10:11:53.705772 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:11:53 crc kubenswrapper[4876]: E1215 10:11:53.706472 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:07 crc kubenswrapper[4876]: I1215 10:12:07.707094 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:12:07 crc kubenswrapper[4876]: E1215 10:12:07.708049 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:20 crc kubenswrapper[4876]: I1215 10:12:20.706419 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:12:20 crc kubenswrapper[4876]: E1215 10:12:20.707197 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:32 crc kubenswrapper[4876]: I1215 10:12:32.707410 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:12:32 crc kubenswrapper[4876]: E1215 10:12:32.708455 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:43 crc kubenswrapper[4876]: I1215 10:12:43.706762 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:12:43 crc kubenswrapper[4876]: E1215 10:12:43.708953 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.102152 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103206 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="registry-server" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103228 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="registry-server" Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103245 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="extract-utilities" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103253 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="extract-utilities" Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103263 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="extract-content" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103269 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="extract-content" Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103285 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="extract-utilities" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103291 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="extract-utilities" Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103302 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="registry-server" Dec 15 10:12:48 
crc kubenswrapper[4876]: I1215 10:12:48.103308 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="registry-server" Dec 15 10:12:48 crc kubenswrapper[4876]: E1215 10:12:48.103316 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="extract-content" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103323 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="extract-content" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103520 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc53a06-3340-49bf-a0ff-68d2fe3aabc7" containerName="registry-server" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.103540 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="370b12de-85af-4325-9f6e-b50fc27bc494" containerName="registry-server" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.105015 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.124318 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.186503 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l5tg\" (UniqueName: \"kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.186592 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.186805 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.288712 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l5tg\" (UniqueName: \"kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.288788 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.288870 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.289512 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.289608 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.318396 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l5tg\" (UniqueName: \"kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg\") pod \"community-operators-k8jp9\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:48 crc kubenswrapper[4876]: I1215 10:12:48.437958 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:49 crc kubenswrapper[4876]: I1215 10:12:49.015017 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:12:49 crc kubenswrapper[4876]: I1215 10:12:49.632633 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerID="468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201" exitCode=0 Dec 15 10:12:49 crc kubenswrapper[4876]: I1215 10:12:49.632713 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerDied","Data":"468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201"} Dec 15 10:12:49 crc kubenswrapper[4876]: I1215 10:12:49.632887 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerStarted","Data":"05822193d85477f01755efebf72eec30f67f956c6fedba7b3a3eb49f50222226"} Dec 15 10:12:50 crc kubenswrapper[4876]: I1215 10:12:50.644619 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerStarted","Data":"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3"} Dec 15 10:12:51 crc kubenswrapper[4876]: I1215 10:12:51.656541 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerID="bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3" exitCode=0 Dec 15 10:12:51 crc kubenswrapper[4876]: I1215 10:12:51.656584 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" 
event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerDied","Data":"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3"} Dec 15 10:12:52 crc kubenswrapper[4876]: I1215 10:12:52.668391 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerStarted","Data":"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf"} Dec 15 10:12:52 crc kubenswrapper[4876]: I1215 10:12:52.689335 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k8jp9" podStartSLOduration=2.188092693 podStartE2EDuration="4.689298315s" podCreationTimestamp="2025-12-15 10:12:48 +0000 UTC" firstStartedPulling="2025-12-15 10:12:49.634562449 +0000 UTC m=+12095.205705360" lastFinishedPulling="2025-12-15 10:12:52.135768061 +0000 UTC m=+12097.706910982" observedRunningTime="2025-12-15 10:12:52.687579149 +0000 UTC m=+12098.258722070" watchObservedRunningTime="2025-12-15 10:12:52.689298315 +0000 UTC m=+12098.260441226" Dec 15 10:12:55 crc kubenswrapper[4876]: I1215 10:12:55.706671 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:12:55 crc kubenswrapper[4876]: E1215 10:12:55.707174 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:12:58 crc kubenswrapper[4876]: I1215 10:12:58.438644 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:58 crc kubenswrapper[4876]: I1215 10:12:58.438976 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:58 crc kubenswrapper[4876]: I1215 10:12:58.507355 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:58 crc kubenswrapper[4876]: I1215 10:12:58.792729 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:12:58 crc kubenswrapper[4876]: I1215 10:12:58.853894 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:13:00 crc kubenswrapper[4876]: I1215 10:13:00.745029 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k8jp9" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="registry-server" containerID="cri-o://e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf" gracePeriod=2 Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.436913 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.572576 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities\") pod \"f4fb25a1-39fe-419b-9ba7-5be58c137611\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.572759 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content\") pod \"f4fb25a1-39fe-419b-9ba7-5be58c137611\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.572839 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l5tg\" (UniqueName: \"kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg\") pod \"f4fb25a1-39fe-419b-9ba7-5be58c137611\" (UID: \"f4fb25a1-39fe-419b-9ba7-5be58c137611\") " Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.573830 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities" (OuterVolumeSpecName: "utilities") pod "f4fb25a1-39fe-419b-9ba7-5be58c137611" (UID: "f4fb25a1-39fe-419b-9ba7-5be58c137611"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.580679 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg" (OuterVolumeSpecName: "kube-api-access-2l5tg") pod "f4fb25a1-39fe-419b-9ba7-5be58c137611" (UID: "f4fb25a1-39fe-419b-9ba7-5be58c137611"). InnerVolumeSpecName "kube-api-access-2l5tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.618319 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4fb25a1-39fe-419b-9ba7-5be58c137611" (UID: "f4fb25a1-39fe-419b-9ba7-5be58c137611"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.675553 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l5tg\" (UniqueName: \"kubernetes.io/projected/f4fb25a1-39fe-419b-9ba7-5be58c137611-kube-api-access-2l5tg\") on node \"crc\" DevicePath \"\"" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.675580 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.675589 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4fb25a1-39fe-419b-9ba7-5be58c137611-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.757721 4876 generic.go:334] "Generic (PLEG): container finished" podID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerID="e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf" exitCode=0 Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.757771 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerDied","Data":"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf"} Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.757834 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k8jp9" event={"ID":"f4fb25a1-39fe-419b-9ba7-5be58c137611","Type":"ContainerDied","Data":"05822193d85477f01755efebf72eec30f67f956c6fedba7b3a3eb49f50222226"} Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.757853 4876 scope.go:117] "RemoveContainer" containerID="e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.757873 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k8jp9" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.782564 4876 scope.go:117] "RemoveContainer" containerID="bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.813467 4876 scope.go:117] "RemoveContainer" containerID="468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.832183 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.844442 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k8jp9"] Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.869963 4876 scope.go:117] "RemoveContainer" containerID="e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf" Dec 15 10:13:01 crc kubenswrapper[4876]: E1215 10:13:01.872002 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf\": container with ID starting with e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf not found: ID does not exist" containerID="e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.872041 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf"} err="failed to get container status \"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf\": rpc error: code = NotFound desc = could not find container \"e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf\": container with ID starting with e6d9a3ce87e0008ae870af74cd4c40e1a2952c0b5fb6a3930a736a037939ecbf not found: ID does not exist" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.872065 4876 scope.go:117] "RemoveContainer" containerID="bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3" Dec 15 10:13:01 crc kubenswrapper[4876]: E1215 10:13:01.872603 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3\": container with ID starting with bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3 not found: ID does not exist" containerID="bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.872670 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3"} err="failed to get container status \"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3\": rpc error: code = NotFound desc = could not find container \"bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3\": container with ID starting with bf0813b07c96c8d47500369e326249648bd55e47369c1201d1c3ee80946f3db3 not found: ID does not exist" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.872703 4876 scope.go:117] "RemoveContainer" containerID="468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201" Dec 15 10:13:01 crc kubenswrapper[4876]: E1215 10:13:01.873059 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201\": container with ID starting with 468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201 not found: ID does not exist" containerID="468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201" Dec 15 10:13:01 crc kubenswrapper[4876]: I1215 10:13:01.873094 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201"} err="failed to get container status \"468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201\": rpc error: code = NotFound desc = could not find container \"468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201\": container with ID starting with 468e3a24cb5f1cc23c4a710599af5417377d2a3c934f4c9c5b03b03a1b4e7201 not found: ID does not exist" Dec 15 10:13:02 crc kubenswrapper[4876]: I1215 10:13:02.718142 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" path="/var/lib/kubelet/pods/f4fb25a1-39fe-419b-9ba7-5be58c137611/volumes" Dec 15 10:13:08 crc kubenswrapper[4876]: I1215 10:13:08.706249 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:13:08 crc kubenswrapper[4876]: E1215 10:13:08.708396 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:13:21 crc kubenswrapper[4876]: I1215 10:13:21.705517 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:13:21 crc kubenswrapper[4876]: E1215 10:13:21.707072 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:13:32 crc kubenswrapper[4876]: I1215 10:13:32.708955 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:13:32 crc kubenswrapper[4876]: E1215 10:13:32.710768 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:13:43 crc kubenswrapper[4876]: I1215 10:13:43.705434 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:13:43 crc kubenswrapper[4876]: E1215 10:13:43.706259 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:13:55 crc kubenswrapper[4876]: I1215 10:13:55.705918 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:13:55 crc kubenswrapper[4876]: E1215 10:13:55.706697 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:14:10 crc kubenswrapper[4876]: I1215 10:14:10.706218 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:14:10 crc kubenswrapper[4876]: E1215 10:14:10.707162 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:14:25 crc kubenswrapper[4876]: I1215 10:14:25.705571 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:14:25 crc kubenswrapper[4876]: E1215 10:14:25.706348 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:14:39 crc kubenswrapper[4876]: I1215 10:14:39.705132 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:14:40 crc kubenswrapper[4876]: I1215 10:14:40.820599 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0"} Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.146595 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb"] Dec 15 10:15:00 crc kubenswrapper[4876]: E1215 10:15:00.147667 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="registry-server" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.147691 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="registry-server" Dec 15 10:15:00 crc kubenswrapper[4876]: E1215 10:15:00.147712 4876 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="extract-content" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.147720 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="extract-content" Dec 15 10:15:00 crc kubenswrapper[4876]: E1215 10:15:00.147739 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="extract-utilities" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.147747 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="extract-utilities" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.148006 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4fb25a1-39fe-419b-9ba7-5be58c137611" containerName="registry-server" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.149009 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.153680 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.154478 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.158299 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb"] Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.192665 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.193166 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.193282 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkjbs\" (UniqueName: \"kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.295428 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.295537 4876 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkjbs\" (UniqueName: \"kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.295675 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.296967 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.302957 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.321996 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkjbs\" (UniqueName: \"kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs\") pod \"collect-profiles-29429895-ktzwb\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.472644 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:00 crc kubenswrapper[4876]: I1215 10:15:00.944914 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb"] Dec 15 10:15:01 crc kubenswrapper[4876]: I1215 10:15:01.043562 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" event={"ID":"96aadbab-de22-4d4a-89a9-2a663bb19e52","Type":"ContainerStarted","Data":"2fc22a77f97973c8cc438e6e4dd413d6a512c7acde9be982967f9299154a836f"} Dec 15 10:15:02 crc kubenswrapper[4876]: I1215 10:15:02.055807 4876 generic.go:334] "Generic (PLEG): container finished" podID="96aadbab-de22-4d4a-89a9-2a663bb19e52" containerID="a3476d042393f36b9676b1a1eeed7e2db0739a47e4d5ca01bfe446625cd70bcd" exitCode=0 Dec 15 10:15:02 crc kubenswrapper[4876]: I1215 10:15:02.055914 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" event={"ID":"96aadbab-de22-4d4a-89a9-2a663bb19e52","Type":"ContainerDied","Data":"a3476d042393f36b9676b1a1eeed7e2db0739a47e4d5ca01bfe446625cd70bcd"} Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.600433 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.772578 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume\") pod \"96aadbab-de22-4d4a-89a9-2a663bb19e52\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.772708 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume\") pod \"96aadbab-de22-4d4a-89a9-2a663bb19e52\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.772807 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkjbs\" (UniqueName: \"kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs\") pod \"96aadbab-de22-4d4a-89a9-2a663bb19e52\" (UID: \"96aadbab-de22-4d4a-89a9-2a663bb19e52\") " Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.773299 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume" (OuterVolumeSpecName: "config-volume") pod "96aadbab-de22-4d4a-89a9-2a663bb19e52" (UID: "96aadbab-de22-4d4a-89a9-2a663bb19e52"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.773903 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/96aadbab-de22-4d4a-89a9-2a663bb19e52-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.785457 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "96aadbab-de22-4d4a-89a9-2a663bb19e52" (UID: "96aadbab-de22-4d4a-89a9-2a663bb19e52"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.793277 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs" (OuterVolumeSpecName: "kube-api-access-pkjbs") pod "96aadbab-de22-4d4a-89a9-2a663bb19e52" (UID: "96aadbab-de22-4d4a-89a9-2a663bb19e52"). InnerVolumeSpecName "kube-api-access-pkjbs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.875931 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/96aadbab-de22-4d4a-89a9-2a663bb19e52-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:15:03 crc kubenswrapper[4876]: I1215 10:15:03.876300 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkjbs\" (UniqueName: \"kubernetes.io/projected/96aadbab-de22-4d4a-89a9-2a663bb19e52-kube-api-access-pkjbs\") on node \"crc\" DevicePath \"\"" Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.080917 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" event={"ID":"96aadbab-de22-4d4a-89a9-2a663bb19e52","Type":"ContainerDied","Data":"2fc22a77f97973c8cc438e6e4dd413d6a512c7acde9be982967f9299154a836f"} Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.080955 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fc22a77f97973c8cc438e6e4dd413d6a512c7acde9be982967f9299154a836f" Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.081010 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429895-ktzwb" Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.687444 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm"] Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.701862 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429850-whvpm"] Dec 15 10:15:04 crc kubenswrapper[4876]: I1215 10:15:04.723370 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5059b4e9-2c92-4e83-8154-17f66d795ed9" path="/var/lib/kubelet/pods/5059b4e9-2c92-4e83-8154-17f66d795ed9/volumes" Dec 15 10:15:40 crc kubenswrapper[4876]: I1215 10:15:40.271849 4876 scope.go:117] "RemoveContainer" containerID="ab8e24d6b876ea0ad2ff9f024061dda3c87ccfc90bf04f4b47e482898fa89dee" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.694509 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:13 crc kubenswrapper[4876]: E1215 10:16:13.695615 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96aadbab-de22-4d4a-89a9-2a663bb19e52" containerName="collect-profiles" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.695632 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="96aadbab-de22-4d4a-89a9-2a663bb19e52" containerName="collect-profiles" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.695861 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="96aadbab-de22-4d4a-89a9-2a663bb19e52" containerName="collect-profiles" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.698129 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.712583 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.824353 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.824414 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdw2n\" (UniqueName: \"kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.824486 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.926823 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.926899 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdw2n\" (UniqueName: \"kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.926947 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.927584 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.927617 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:13 crc kubenswrapper[4876]: I1215 10:16:13.948495 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wdw2n\" (UniqueName: \"kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n\") pod \"redhat-operators-zjk9n\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.018628 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.540383 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.812515 4876 generic.go:334] "Generic (PLEG): container finished" podID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerID="c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97" exitCode=0 Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.813195 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerDied","Data":"c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97"} Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.813230 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerStarted","Data":"53790e5e061b8eb357ea249564db85a9d1e4bcf8bdc69d2cb98301f91602e633"} Dec 15 10:16:14 crc kubenswrapper[4876]: I1215 10:16:14.815616 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 10:16:15 crc kubenswrapper[4876]: I1215 10:16:15.824763 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerStarted","Data":"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2"} Dec 15 10:16:18 crc kubenswrapper[4876]: I1215 10:16:18.865874 4876 generic.go:334] "Generic (PLEG): container finished" podID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerID="3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2" exitCode=0 Dec 15 10:16:18 crc kubenswrapper[4876]: I1215 10:16:18.866340 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerDied","Data":"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2"} Dec 15 10:16:19 crc kubenswrapper[4876]: I1215 10:16:19.879922 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerStarted","Data":"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459"} Dec 15 10:16:19 crc kubenswrapper[4876]: I1215 10:16:19.906827 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zjk9n" podStartSLOduration=2.342782628 podStartE2EDuration="6.906805207s" podCreationTimestamp="2025-12-15 10:16:13 +0000 UTC" firstStartedPulling="2025-12-15 10:16:14.815314872 +0000 UTC m=+12300.386457793" lastFinishedPulling="2025-12-15 10:16:19.379337441 +0000 UTC m=+12304.950480372" observedRunningTime="2025-12-15 10:16:19.89572414 +0000 UTC m=+12305.466867051" watchObservedRunningTime="2025-12-15 10:16:19.906805207 +0000 UTC m=+12305.477948128" Dec 15 10:16:24 crc 
kubenswrapper[4876]: I1215 10:16:24.018862 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:24 crc kubenswrapper[4876]: I1215 10:16:24.021306 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:25 crc kubenswrapper[4876]: I1215 10:16:25.091283 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zjk9n" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="registry-server" probeResult="failure" output=< Dec 15 10:16:25 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 10:16:25 crc kubenswrapper[4876]: > Dec 15 10:16:34 crc kubenswrapper[4876]: I1215 10:16:34.083140 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:34 crc kubenswrapper[4876]: I1215 10:16:34.131359 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:34 crc kubenswrapper[4876]: I1215 10:16:34.325715 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.173216 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zjk9n" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="registry-server" containerID="cri-o://45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459" gracePeriod=2 Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.769259 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.836795 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wdw2n\" (UniqueName: \"kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n\") pod \"da97a9c9-98d2-458c-9e9b-321af6459e36\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.836954 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content\") pod \"da97a9c9-98d2-458c-9e9b-321af6459e36\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.836992 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities\") pod \"da97a9c9-98d2-458c-9e9b-321af6459e36\" (UID: \"da97a9c9-98d2-458c-9e9b-321af6459e36\") " Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.838062 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities" (OuterVolumeSpecName: "utilities") pod "da97a9c9-98d2-458c-9e9b-321af6459e36" (UID: "da97a9c9-98d2-458c-9e9b-321af6459e36"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.842752 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n" (OuterVolumeSpecName: "kube-api-access-wdw2n") pod "da97a9c9-98d2-458c-9e9b-321af6459e36" (UID: "da97a9c9-98d2-458c-9e9b-321af6459e36"). InnerVolumeSpecName "kube-api-access-wdw2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.939647 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wdw2n\" (UniqueName: \"kubernetes.io/projected/da97a9c9-98d2-458c-9e9b-321af6459e36-kube-api-access-wdw2n\") on node \"crc\" DevicePath \"\"" Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.939683 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:16:35 crc kubenswrapper[4876]: I1215 10:16:35.951551 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "da97a9c9-98d2-458c-9e9b-321af6459e36" (UID: "da97a9c9-98d2-458c-9e9b-321af6459e36"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.041697 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da97a9c9-98d2-458c-9e9b-321af6459e36-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.184941 4876 generic.go:334] "Generic (PLEG): container finished" podID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerID="45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459" exitCode=0 Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.185152 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerDied","Data":"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459"} Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.185358 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zjk9n" event={"ID":"da97a9c9-98d2-458c-9e9b-321af6459e36","Type":"ContainerDied","Data":"53790e5e061b8eb357ea249564db85a9d1e4bcf8bdc69d2cb98301f91602e633"} Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.185391 4876 scope.go:117] "RemoveContainer" containerID="45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.185195 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zjk9n" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.213187 4876 scope.go:117] "RemoveContainer" containerID="3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.244285 4876 scope.go:117] "RemoveContainer" containerID="c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.250653 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.267764 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zjk9n"] Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.294735 4876 scope.go:117] "RemoveContainer" containerID="45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459" Dec 15 10:16:36 crc kubenswrapper[4876]: E1215 10:16:36.295224 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459\": container with ID starting with 45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459 not found: ID does not exist" containerID="45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.295262 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459"} err="failed to get container status \"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459\": rpc error: code = NotFound desc = could not find container \"45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459\": container with ID starting with 45e5f313f62dfc5147489bbbcfaaa9d951a48054c6610f83460bb937ea85e459 not found: ID does not exist" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.295288 4876 scope.go:117] "RemoveContainer" containerID="3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2" Dec 15 10:16:36 crc kubenswrapper[4876]: E1215 10:16:36.295698 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2\": container with ID starting with 3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2 not found: ID does not exist" containerID="3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.295781 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2"} err="failed to get container status \"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2\": rpc error: code = NotFound desc = could not find container \"3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2\": container with ID starting with 3bd7c1320dc06da7f0bdaed1b054e2b2f87d565543d3e911682cbf4390d67fe2 not found: ID does not exist" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.295811 4876 scope.go:117] "RemoveContainer" containerID="c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97" Dec 15 10:16:36 crc kubenswrapper[4876]: E1215 10:16:36.296136 4876 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97\": container with ID starting with c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97 not found: ID does not exist" containerID="c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.296163 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97"} err="failed to get container status \"c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97\": rpc error: code = NotFound desc = could not find container \"c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97\": container with ID starting with c8255f05783da1734243c0dfe6fc1228b56009d7477d7e2ec82c5c9ed9245b97 not found: ID does not exist" Dec 15 10:16:36 crc kubenswrapper[4876]: I1215 10:16:36.719639 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" path="/var/lib/kubelet/pods/da97a9c9-98d2-458c-9e9b-321af6459e36/volumes" Dec 15 10:16:57 crc kubenswrapper[4876]: I1215 10:16:57.322554 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:16:57 crc kubenswrapper[4876]: I1215 10:16:57.323291 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:17:27 crc kubenswrapper[4876]: I1215 10:17:27.322237 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:17:27 crc kubenswrapper[4876]: I1215 10:17:27.323369 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:17:57 crc kubenswrapper[4876]: I1215 10:17:57.322564 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:17:57 crc kubenswrapper[4876]: I1215 10:17:57.323351 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:17:57 crc kubenswrapper[4876]: I1215 10:17:57.323439 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:17:57 crc kubenswrapper[4876]: I1215 10:17:57.324613 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:17:57 crc kubenswrapper[4876]: I1215 10:17:57.324720 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0" gracePeriod=600 Dec 15 10:17:58 crc kubenswrapper[4876]: I1215 10:17:58.143623 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0" exitCode=0 Dec 15 10:17:58 crc kubenswrapper[4876]: I1215 10:17:58.143655 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0"} Dec 15 10:17:58 crc kubenswrapper[4876]: I1215 10:17:58.144287 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf"} Dec 15 10:17:58 crc kubenswrapper[4876]: I1215 10:17:58.144317 4876 scope.go:117] "RemoveContainer" containerID="8c070caad90ee953b66f906552beb2334e1af7e482d6594c624ba442a3a047df" Dec 15 10:18:05 crc kubenswrapper[4876]: I1215 10:18:05.235530 4876 generic.go:334] "Generic (PLEG): container finished" podID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" containerID="150f2f27d3e348e1b728f60d4751bc28cf7b9c8b752bf71c7fde0468b6140cfc" exitCode=0 Dec 15 10:18:05 crc kubenswrapper[4876]: I1215 10:18:05.235782 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7e4c7322-1b07-4628-85ab-5f72f6f44e04","Type":"ContainerDied","Data":"150f2f27d3e348e1b728f60d4751bc28cf7b9c8b752bf71c7fde0468b6140cfc"} Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.687802 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.818567 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.818702 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.818725 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.819565 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.819642 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.819676 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.819729 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzzz5\" (UniqueName: \"kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.819996 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.820036 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret\") pod \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\" (UID: \"7e4c7322-1b07-4628-85ab-5f72f6f44e04\") " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.820622 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.821068 4876 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.822987 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data" (OuterVolumeSpecName: "config-data") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.824952 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "test-operator-logs") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.826267 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.828982 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5" (OuterVolumeSpecName: "kube-api-access-qzzz5") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "kube-api-access-qzzz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.848648 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.854802 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.858359 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.891474 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7e4c7322-1b07-4628-85ab-5f72f6f44e04" (UID: "7e4c7322-1b07-4628-85ab-5f72f6f44e04"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.924977 4876 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925021 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzzz5\" (UniqueName: \"kubernetes.io/projected/7e4c7322-1b07-4628-85ab-5f72f6f44e04-kube-api-access-qzzz5\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925038 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925050 4876 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/7e4c7322-1b07-4628-85ab-5f72f6f44e04-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925154 4876 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925175 4876 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/7e4c7322-1b07-4628-85ab-5f72f6f44e04-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925193 4876 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.925207 4876 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7e4c7322-1b07-4628-85ab-5f72f6f44e04-config-data\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:06 crc kubenswrapper[4876]: I1215 10:18:06.976968 4876 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Dec 15 10:18:07 crc kubenswrapper[4876]: I1215 10:18:07.027559 4876 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Dec 15 10:18:07 crc kubenswrapper[4876]: I1215 10:18:07.261077 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"7e4c7322-1b07-4628-85ab-5f72f6f44e04","Type":"ContainerDied","Data":"36e0c591e8c5d4d43c9f97305838e56b6a8419222b2b4caa58c8b542f3c26fca"} Dec 15 10:18:07 crc kubenswrapper[4876]: I1215 10:18:07.261150 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36e0c591e8c5d4d43c9f97305838e56b6a8419222b2b4caa58c8b542f3c26fca" Dec 15 10:18:07 crc kubenswrapper[4876]: I1215 10:18:07.261187 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.071841 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 15 10:18:17 crc kubenswrapper[4876]: E1215 10:18:17.072707 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="extract-content" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072724 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="extract-content" Dec 15 10:18:17 crc kubenswrapper[4876]: E1215 10:18:17.072737 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="extract-utilities" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072743 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="extract-utilities" Dec 15 10:18:17 crc kubenswrapper[4876]: E1215 10:18:17.072756 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" containerName="tempest-tests-tempest-tests-runner" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072763 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" containerName="tempest-tests-tempest-tests-runner" Dec 15 10:18:17 crc kubenswrapper[4876]: E1215 10:18:17.072779 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="registry-server" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072785 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="registry-server" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072971 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4c7322-1b07-4628-85ab-5f72f6f44e04" containerName="tempest-tests-tempest-tests-runner" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.072991 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="da97a9c9-98d2-458c-9e9b-321af6459e36" containerName="registry-server" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.073819 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.081427 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-qgf6m" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.111643 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.146494 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxgcs\" (UniqueName: \"kubernetes.io/projected/e81a48d9-360b-467f-9756-d0bfd775042e-kube-api-access-pxgcs\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.146685 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.248356 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxgcs\" (UniqueName: \"kubernetes.io/projected/e81a48d9-360b-467f-9756-d0bfd775042e-kube-api-access-pxgcs\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.248557 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.249061 4876 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.270342 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxgcs\" (UniqueName: \"kubernetes.io/projected/e81a48d9-360b-467f-9756-d0bfd775042e-kube-api-access-pxgcs\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.290093 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e81a48d9-360b-467f-9756-d0bfd775042e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc 
kubenswrapper[4876]: I1215 10:18:17.398769 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 15 10:18:17 crc kubenswrapper[4876]: I1215 10:18:17.858033 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 15 10:18:18 crc kubenswrapper[4876]: I1215 10:18:18.382924 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e81a48d9-360b-467f-9756-d0bfd775042e","Type":"ContainerStarted","Data":"0ffd03381d6c0b571168045609e2020ca4d224f3565ad1c399efd8567fc10ed2"} Dec 15 10:18:19 crc kubenswrapper[4876]: I1215 10:18:19.409490 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e81a48d9-360b-467f-9756-d0bfd775042e","Type":"ContainerStarted","Data":"58b95a01001d8b1cdc19e7c0d036fcb5bf6d3f1207eef8c95b3eba7af61b14a7"} Dec 15 10:18:19 crc kubenswrapper[4876]: I1215 10:18:19.436484 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.526511142 podStartE2EDuration="2.436462675s" podCreationTimestamp="2025-12-15 10:18:17 +0000 UTC" firstStartedPulling="2025-12-15 10:18:17.85799047 +0000 UTC m=+12423.429133381" lastFinishedPulling="2025-12-15 10:18:18.767942003 +0000 UTC m=+12424.339084914" observedRunningTime="2025-12-15 10:18:19.42357273 +0000 UTC m=+12424.994715671" watchObservedRunningTime="2025-12-15 10:18:19.436462675 +0000 UTC m=+12425.007605596" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.390969 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zspp/must-gather-4m6px"] Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.393222 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.395549 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9zspp"/"openshift-service-ca.crt" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.395874 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9zspp"/"default-dockercfg-rgr65" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.398457 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9zspp"/"kube-root-ca.crt" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.411872 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9zspp/must-gather-4m6px"] Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.464259 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.464434 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pssvb\" (UniqueName: \"kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.566677 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.566799 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pssvb\" (UniqueName: \"kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.567256 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.599191 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pssvb\" (UniqueName: \"kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb\") pod \"must-gather-4m6px\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:45 crc kubenswrapper[4876]: I1215 10:19:45.716418 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:19:46 crc kubenswrapper[4876]: I1215 10:19:46.219807 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9zspp/must-gather-4m6px"] Dec 15 10:19:46 crc kubenswrapper[4876]: I1215 10:19:46.333759 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/must-gather-4m6px" event={"ID":"f3b4535d-a931-4343-bfdd-546269700b44","Type":"ContainerStarted","Data":"95ecc6f86bce899b2d3e836060311c32cc4fc779332d74a40ceda527f5a1ec94"} Dec 15 10:19:53 crc kubenswrapper[4876]: I1215 10:19:53.418497 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/must-gather-4m6px" event={"ID":"f3b4535d-a931-4343-bfdd-546269700b44","Type":"ContainerStarted","Data":"a7a2270af194e951e4317bed9d7f6bbae5b519496399b42e0eb3d827fc01a506"} Dec 15 10:19:53 crc kubenswrapper[4876]: I1215 10:19:53.418990 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/must-gather-4m6px" event={"ID":"f3b4535d-a931-4343-bfdd-546269700b44","Type":"ContainerStarted","Data":"7469f7d336bc8ca3627424efd52d529ed4433a60b9fee28edab3c044ef8f8df4"} Dec 15 10:19:53 crc kubenswrapper[4876]: I1215 10:19:53.441940 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zspp/must-gather-4m6px" podStartSLOduration=2.253165336 podStartE2EDuration="8.441919798s" podCreationTimestamp="2025-12-15 10:19:45 +0000 UTC" firstStartedPulling="2025-12-15 10:19:46.229738746 +0000 UTC m=+12511.800881657" lastFinishedPulling="2025-12-15 10:19:52.418493168 +0000 UTC m=+12517.989636119" observedRunningTime="2025-12-15 10:19:53.433786891 +0000 UTC m=+12519.004929812" watchObservedRunningTime="2025-12-15 10:19:53.441919798 +0000 UTC m=+12519.013062709" Dec 15 10:19:56 crc kubenswrapper[4876]: E1215 10:19:56.426626 4876 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.70:56444->38.102.83.70:35145: write tcp 38.102.83.70:56444->38.102.83.70:35145: write: connection reset by peer Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.184049 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zspp/crc-debug-4mwct"] Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.185973 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.214412 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.214754 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54mdt\" (UniqueName: \"kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.316875 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.316937 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54mdt\" (UniqueName: \"kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.317019 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.323004 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.323298 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.339100 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54mdt\" (UniqueName: \"kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt\") pod \"crc-debug-4mwct\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: I1215 10:19:57.509149 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:19:57 crc kubenswrapper[4876]: W1215 10:19:57.551001 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2c42034_e4a2_4651_9b5d_e51fd11802a6.slice/crio-186865e6b612352539bf630569dc1eab446864121c5091baf9ea11bd03e584f1 WatchSource:0}: Error finding container 186865e6b612352539bf630569dc1eab446864121c5091baf9ea11bd03e584f1: Status 404 returned error can't find the container with id 186865e6b612352539bf630569dc1eab446864121c5091baf9ea11bd03e584f1 Dec 15 10:19:58 crc kubenswrapper[4876]: I1215 10:19:58.477697 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-4mwct" event={"ID":"c2c42034-e4a2-4651-9b5d-e51fd11802a6","Type":"ContainerStarted","Data":"186865e6b612352539bf630569dc1eab446864121c5091baf9ea11bd03e584f1"} Dec 15 10:20:08 crc kubenswrapper[4876]: I1215 10:20:08.597629 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-4mwct" event={"ID":"c2c42034-e4a2-4651-9b5d-e51fd11802a6","Type":"ContainerStarted","Data":"aafc3220d5e2501833105ba5fd9d54aeb3ba21997293667266c1dc39f39561ce"} Dec 15 10:20:08 crc kubenswrapper[4876]: I1215 10:20:08.621712 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9zspp/crc-debug-4mwct" podStartSLOduration=1.826451617 podStartE2EDuration="11.62167515s" podCreationTimestamp="2025-12-15 10:19:57 +0000 UTC" firstStartedPulling="2025-12-15 10:19:57.554037163 +0000 UTC m=+12523.125180074" lastFinishedPulling="2025-12-15 10:20:07.349260686 +0000 UTC m=+12532.920403607" observedRunningTime="2025-12-15 10:20:08.616558543 +0000 UTC m=+12534.187701454" watchObservedRunningTime="2025-12-15 10:20:08.62167515 +0000 UTC m=+12534.192818061" Dec 15 10:20:27 crc kubenswrapper[4876]: I1215 10:20:27.322856 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:20:27 crc kubenswrapper[4876]: I1215 10:20:27.323433 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:20:53 crc kubenswrapper[4876]: I1215 10:20:53.181793 4876 generic.go:334] "Generic (PLEG): container finished" podID="c2c42034-e4a2-4651-9b5d-e51fd11802a6" containerID="aafc3220d5e2501833105ba5fd9d54aeb3ba21997293667266c1dc39f39561ce" exitCode=0 Dec 15 10:20:53 crc kubenswrapper[4876]: I1215 10:20:53.181892 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-4mwct" event={"ID":"c2c42034-e4a2-4651-9b5d-e51fd11802a6","Type":"ContainerDied","Data":"aafc3220d5e2501833105ba5fd9d54aeb3ba21997293667266c1dc39f39561ce"} Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.338154 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.381974 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-4mwct"] Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.392772 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-4mwct"] Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.458209 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54mdt\" (UniqueName: \"kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt\") pod \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.458393 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host\") pod \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\" (UID: \"c2c42034-e4a2-4651-9b5d-e51fd11802a6\") " Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.458517 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host" (OuterVolumeSpecName: "host") pod "c2c42034-e4a2-4651-9b5d-e51fd11802a6" (UID: "c2c42034-e4a2-4651-9b5d-e51fd11802a6"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.458830 4876 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c2c42034-e4a2-4651-9b5d-e51fd11802a6-host\") on node \"crc\" DevicePath \"\"" Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.468322 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt" (OuterVolumeSpecName: "kube-api-access-54mdt") pod "c2c42034-e4a2-4651-9b5d-e51fd11802a6" (UID: "c2c42034-e4a2-4651-9b5d-e51fd11802a6"). InnerVolumeSpecName "kube-api-access-54mdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.561147 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54mdt\" (UniqueName: \"kubernetes.io/projected/c2c42034-e4a2-4651-9b5d-e51fd11802a6-kube-api-access-54mdt\") on node \"crc\" DevicePath \"\"" Dec 15 10:20:54 crc kubenswrapper[4876]: I1215 10:20:54.722711 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2c42034-e4a2-4651-9b5d-e51fd11802a6" path="/var/lib/kubelet/pods/c2c42034-e4a2-4651-9b5d-e51fd11802a6/volumes" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.209089 4876 scope.go:117] "RemoveContainer" containerID="aafc3220d5e2501833105ba5fd9d54aeb3ba21997293667266c1dc39f39561ce" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.209183 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-4mwct" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.565682 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zspp/crc-debug-xm76v"] Dec 15 10:20:55 crc kubenswrapper[4876]: E1215 10:20:55.566340 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2c42034-e4a2-4651-9b5d-e51fd11802a6" containerName="container-00" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.566352 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2c42034-e4a2-4651-9b5d-e51fd11802a6" containerName="container-00" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.566548 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2c42034-e4a2-4651-9b5d-e51fd11802a6" containerName="container-00" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.567223 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.683454 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.683938 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw922\" (UniqueName: \"kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.787398 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw922\" (UniqueName: \"kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.788183 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.788325 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.805988 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw922\" (UniqueName: \"kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922\") pod \"crc-debug-xm76v\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:55 crc kubenswrapper[4876]: I1215 10:20:55.886294 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:56 crc kubenswrapper[4876]: I1215 10:20:56.220962 4876 generic.go:334] "Generic (PLEG): container finished" podID="aa7526eb-a31c-4f67-a569-c4902619e7a1" containerID="b78bd12a76719b062e99abad091a54062cab3f396e31181dafd483caa9dc107d" exitCode=0 Dec 15 10:20:56 crc kubenswrapper[4876]: I1215 10:20:56.221047 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-xm76v" event={"ID":"aa7526eb-a31c-4f67-a569-c4902619e7a1","Type":"ContainerDied","Data":"b78bd12a76719b062e99abad091a54062cab3f396e31181dafd483caa9dc107d"} Dec 15 10:20:56 crc kubenswrapper[4876]: I1215 10:20:56.221449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-xm76v" event={"ID":"aa7526eb-a31c-4f67-a569-c4902619e7a1","Type":"ContainerStarted","Data":"f5ebb29bc174b1f7f09bea11240c16843ea21caa3be3651403f2f3b1d11bbade"} Dec 15 10:20:56 crc kubenswrapper[4876]: I1215 10:20:56.895969 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-xm76v"] Dec 15 10:20:56 crc kubenswrapper[4876]: I1215 10:20:56.905025 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-xm76v"] Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.322790 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.322866 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.322918 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.323585 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.323658 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" gracePeriod=600 Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.337877 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:57 crc kubenswrapper[4876]: E1215 10:20:57.446228 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.520332 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw922\" (UniqueName: \"kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922\") pod \"aa7526eb-a31c-4f67-a569-c4902619e7a1\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.520726 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host\") pod \"aa7526eb-a31c-4f67-a569-c4902619e7a1\" (UID: \"aa7526eb-a31c-4f67-a569-c4902619e7a1\") " Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.520806 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host" (OuterVolumeSpecName: "host") pod "aa7526eb-a31c-4f67-a569-c4902619e7a1" (UID: "aa7526eb-a31c-4f67-a569-c4902619e7a1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.521796 4876 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa7526eb-a31c-4f67-a569-c4902619e7a1-host\") on node \"crc\" DevicePath \"\"" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.525400 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922" (OuterVolumeSpecName: "kube-api-access-nw922") pod "aa7526eb-a31c-4f67-a569-c4902619e7a1" (UID: "aa7526eb-a31c-4f67-a569-c4902619e7a1"). InnerVolumeSpecName "kube-api-access-nw922". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:20:57 crc kubenswrapper[4876]: I1215 10:20:57.623843 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw922\" (UniqueName: \"kubernetes.io/projected/aa7526eb-a31c-4f67-a569-c4902619e7a1-kube-api-access-nw922\") on node \"crc\" DevicePath \"\"" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.155239 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9zspp/crc-debug-28hv4"] Dec 15 10:20:58 crc kubenswrapper[4876]: E1215 10:20:58.155763 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7526eb-a31c-4f67-a569-c4902619e7a1" containerName="container-00" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.155777 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7526eb-a31c-4f67-a569-c4902619e7a1" containerName="container-00" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.156028 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7526eb-a31c-4f67-a569-c4902619e7a1" containerName="container-00" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.156869 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.249439 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-xm76v" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.249474 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5ebb29bc174b1f7f09bea11240c16843ea21caa3be3651403f2f3b1d11bbade" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.254542 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" exitCode=0 Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.254615 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf"} Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.254809 4876 scope.go:117] "RemoveContainer" containerID="e142074ed754ca9a14591d4d8825d6a9ea9fe8facd966263e511d53ff988acd0" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.255549 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:20:58 crc kubenswrapper[4876]: E1215 10:20:58.255974 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.338770 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.339234 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prnxz\" (UniqueName: \"kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.441249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prnxz\" (UniqueName: \"kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.441759 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.442363 4876 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.459302 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prnxz\" (UniqueName: \"kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz\") pod \"crc-debug-28hv4\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.478363 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:20:58 crc kubenswrapper[4876]: W1215 10:20:58.519970 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97dda1ac_762f_4426_9360_248c07fc6512.slice/crio-60d67e31957ad0f69bcebcc0700d6c1e39e8975a08aac29149efdcb2c8950417 WatchSource:0}: Error finding container 60d67e31957ad0f69bcebcc0700d6c1e39e8975a08aac29149efdcb2c8950417: Status 404 returned error can't find the container with id 60d67e31957ad0f69bcebcc0700d6c1e39e8975a08aac29149efdcb2c8950417 Dec 15 10:20:58 crc kubenswrapper[4876]: I1215 10:20:58.735658 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa7526eb-a31c-4f67-a569-c4902619e7a1" path="/var/lib/kubelet/pods/aa7526eb-a31c-4f67-a569-c4902619e7a1/volumes" Dec 15 10:20:59 crc kubenswrapper[4876]: I1215 10:20:59.270148 4876 generic.go:334] "Generic (PLEG): container finished" podID="97dda1ac-762f-4426-9360-248c07fc6512" containerID="f6c2a33dce0fa42c1ad8659847d10ddefd9a8833b7c6994fbdd63daaf6b77b06" exitCode=0 Dec 15 10:20:59 crc kubenswrapper[4876]: I1215 10:20:59.270186 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-28hv4" event={"ID":"97dda1ac-762f-4426-9360-248c07fc6512","Type":"ContainerDied","Data":"f6c2a33dce0fa42c1ad8659847d10ddefd9a8833b7c6994fbdd63daaf6b77b06"} Dec 15 10:20:59 crc kubenswrapper[4876]: I1215 10:20:59.270486 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/crc-debug-28hv4" event={"ID":"97dda1ac-762f-4426-9360-248c07fc6512","Type":"ContainerStarted","Data":"60d67e31957ad0f69bcebcc0700d6c1e39e8975a08aac29149efdcb2c8950417"} Dec 15 10:20:59 crc kubenswrapper[4876]: I1215 10:20:59.322834 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-28hv4"] Dec 15 10:20:59 crc kubenswrapper[4876]: I1215 10:20:59.332389 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zspp/crc-debug-28hv4"] Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.399542 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.589987 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prnxz\" (UniqueName: \"kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz\") pod \"97dda1ac-762f-4426-9360-248c07fc6512\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.590434 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host\") pod \"97dda1ac-762f-4426-9360-248c07fc6512\" (UID: \"97dda1ac-762f-4426-9360-248c07fc6512\") " Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.590577 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host" (OuterVolumeSpecName: "host") pod "97dda1ac-762f-4426-9360-248c07fc6512" (UID: "97dda1ac-762f-4426-9360-248c07fc6512"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.591241 4876 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/97dda1ac-762f-4426-9360-248c07fc6512-host\") on node \"crc\" DevicePath \"\"" Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.595386 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz" (OuterVolumeSpecName: "kube-api-access-prnxz") pod "97dda1ac-762f-4426-9360-248c07fc6512" (UID: "97dda1ac-762f-4426-9360-248c07fc6512"). InnerVolumeSpecName "kube-api-access-prnxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.693366 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prnxz\" (UniqueName: \"kubernetes.io/projected/97dda1ac-762f-4426-9360-248c07fc6512-kube-api-access-prnxz\") on node \"crc\" DevicePath \"\"" Dec 15 10:21:00 crc kubenswrapper[4876]: I1215 10:21:00.726957 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97dda1ac-762f-4426-9360-248c07fc6512" path="/var/lib/kubelet/pods/97dda1ac-762f-4426-9360-248c07fc6512/volumes" Dec 15 10:21:01 crc kubenswrapper[4876]: I1215 10:21:01.290919 4876 scope.go:117] "RemoveContainer" containerID="f6c2a33dce0fa42c1ad8659847d10ddefd9a8833b7c6994fbdd63daaf6b77b06" Dec 15 10:21:01 crc kubenswrapper[4876]: I1215 10:21:01.291002 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-9zspp/crc-debug-28hv4" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.864151 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:03 crc kubenswrapper[4876]: E1215 10:21:03.864893 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97dda1ac-762f-4426-9360-248c07fc6512" containerName="container-00" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.864907 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="97dda1ac-762f-4426-9360-248c07fc6512" containerName="container-00" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.865099 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="97dda1ac-762f-4426-9360-248c07fc6512" containerName="container-00" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.866643 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.882467 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.971638 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.971727 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zt8m\" (UniqueName: \"kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:03 crc kubenswrapper[4876]: I1215 10:21:03.971796 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.073452 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.073520 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zt8m\" (UniqueName: \"kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.073563 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities\") pod \"redhat-marketplace-6x5db\" (UID: 
\"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.073956 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.074221 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.107024 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zt8m\" (UniqueName: \"kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m\") pod \"redhat-marketplace-6x5db\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.185905 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:04 crc kubenswrapper[4876]: I1215 10:21:04.747771 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:05 crc kubenswrapper[4876]: I1215 10:21:05.343338 4876 generic.go:334] "Generic (PLEG): container finished" podID="f61bf7bd-db76-435c-8763-01a175971adf" containerID="b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157" exitCode=0 Dec 15 10:21:05 crc kubenswrapper[4876]: I1215 10:21:05.343432 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerDied","Data":"b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157"} Dec 15 10:21:05 crc kubenswrapper[4876]: I1215 10:21:05.343601 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerStarted","Data":"e352556925056683496bf4a7be195bbb526845f06c26c63505c015504ddf8bdb"} Dec 15 10:21:08 crc kubenswrapper[4876]: I1215 10:21:08.374436 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerStarted","Data":"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90"} Dec 15 10:21:09 crc kubenswrapper[4876]: I1215 10:21:09.385254 4876 generic.go:334] "Generic (PLEG): container finished" podID="f61bf7bd-db76-435c-8763-01a175971adf" containerID="e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90" exitCode=0 Dec 15 10:21:09 crc kubenswrapper[4876]: I1215 10:21:09.385606 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerDied","Data":"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90"} Dec 15 10:21:10 crc kubenswrapper[4876]: I1215 10:21:10.405058 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerStarted","Data":"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9"} Dec 15 10:21:10 crc kubenswrapper[4876]: I1215 10:21:10.428605 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6x5db" podStartSLOduration=2.909527374 podStartE2EDuration="7.42858601s" podCreationTimestamp="2025-12-15 10:21:03 +0000 UTC" firstStartedPulling="2025-12-15 10:21:05.345339795 +0000 UTC m=+12590.916482706" lastFinishedPulling="2025-12-15 10:21:09.864398431 +0000 UTC m=+12595.435541342" observedRunningTime="2025-12-15 10:21:10.425093396 +0000 UTC m=+12595.996236327" watchObservedRunningTime="2025-12-15 10:21:10.42858601 +0000 UTC m=+12595.999728941" Dec 15 10:21:12 crc kubenswrapper[4876]: I1215 10:21:12.707213 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:21:12 crc kubenswrapper[4876]: E1215 10:21:12.708218 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:21:14 crc kubenswrapper[4876]: I1215 10:21:14.186010 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:14 crc kubenswrapper[4876]: I1215 10:21:14.186339 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:14 crc kubenswrapper[4876]: I1215 10:21:14.237366 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:24 crc kubenswrapper[4876]: I1215 10:21:24.237803 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:24 crc kubenswrapper[4876]: I1215 10:21:24.292289 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:24 crc kubenswrapper[4876]: I1215 10:21:24.576881 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6x5db" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="registry-server" containerID="cri-o://454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9" gracePeriod=2 Dec 15 10:21:24 crc kubenswrapper[4876]: I1215 10:21:24.718023 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:21:24 crc kubenswrapper[4876]: E1215 10:21:24.718346 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.097534 4876 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.135031 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content\") pod \"f61bf7bd-db76-435c-8763-01a175971adf\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.135117 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zt8m\" (UniqueName: \"kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m\") pod \"f61bf7bd-db76-435c-8763-01a175971adf\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.135234 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities\") pod \"f61bf7bd-db76-435c-8763-01a175971adf\" (UID: \"f61bf7bd-db76-435c-8763-01a175971adf\") " Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.136706 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities" (OuterVolumeSpecName: "utilities") pod "f61bf7bd-db76-435c-8763-01a175971adf" (UID: "f61bf7bd-db76-435c-8763-01a175971adf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.147522 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m" (OuterVolumeSpecName: "kube-api-access-2zt8m") pod "f61bf7bd-db76-435c-8763-01a175971adf" (UID: "f61bf7bd-db76-435c-8763-01a175971adf"). InnerVolumeSpecName "kube-api-access-2zt8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.164731 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f61bf7bd-db76-435c-8763-01a175971adf" (UID: "f61bf7bd-db76-435c-8763-01a175971adf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.238590 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.238673 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zt8m\" (UniqueName: \"kubernetes.io/projected/f61bf7bd-db76-435c-8763-01a175971adf-kube-api-access-2zt8m\") on node \"crc\" DevicePath \"\"" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.238702 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f61bf7bd-db76-435c-8763-01a175971adf-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.591249 4876 generic.go:334] "Generic (PLEG): container finished" podID="f61bf7bd-db76-435c-8763-01a175971adf" containerID="454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9" exitCode=0 Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.591289 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerDied","Data":"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9"} Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.591316 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6x5db" event={"ID":"f61bf7bd-db76-435c-8763-01a175971adf","Type":"ContainerDied","Data":"e352556925056683496bf4a7be195bbb526845f06c26c63505c015504ddf8bdb"} Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.591333 4876 scope.go:117] "RemoveContainer" containerID="454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.591458 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6x5db" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.641660 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.643400 4876 scope.go:117] "RemoveContainer" containerID="e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.668338 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6x5db"] Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.669665 4876 scope.go:117] "RemoveContainer" containerID="b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.752062 4876 scope.go:117] "RemoveContainer" containerID="454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9" Dec 15 10:21:25 crc kubenswrapper[4876]: E1215 10:21:25.752652 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9\": container with ID starting with 454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9 not found: ID does not exist" containerID="454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.752729 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9"} err="failed to get container status \"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9\": rpc error: code = NotFound desc = could not find container \"454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9\": container with ID starting with 454ae9304be21e1f17385f2d548cf200d6b74ebe4e1950bab6e67dcdeba8d7b9 not found: ID does not exist" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.752762 4876 scope.go:117] "RemoveContainer" containerID="e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90" Dec 15 10:21:25 crc kubenswrapper[4876]: E1215 10:21:25.753051 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90\": container with ID starting with e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90 not found: ID does not exist" containerID="e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.753086 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90"} err="failed to get container status \"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90\": rpc error: code = NotFound desc = could not find container \"e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90\": container with ID starting with e319ebb4143b92013f078397edc6ceb29b3518fdd2ceec0f9bbfe194d3c5fe90 not found: ID does not exist" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.753120 4876 scope.go:117] "RemoveContainer" containerID="b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157" Dec 15 10:21:25 crc kubenswrapper[4876]: E1215 10:21:25.753539 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157\": container with ID starting with b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157 not found: ID does not exist" containerID="b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157" Dec 15 10:21:25 crc kubenswrapper[4876]: I1215 10:21:25.753574 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157"} err="failed to get container status \"b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157\": rpc error: code = NotFound desc = could not find container \"b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157\": container with ID starting with b237c9ede7f9d7940fa02959b1b2cd59cc3d870ea2a976de74073354e6ba6157 not found: ID does not exist" Dec 15 10:21:26 crc kubenswrapper[4876]: I1215 10:21:26.724059 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f61bf7bd-db76-435c-8763-01a175971adf" path="/var/lib/kubelet/pods/f61bf7bd-db76-435c-8763-01a175971adf/volumes" Dec 15 10:21:35 crc kubenswrapper[4876]: I1215 10:21:35.706137 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:21:35 crc kubenswrapper[4876]: E1215 10:21:35.706929 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:21:47 crc kubenswrapper[4876]: I1215 10:21:47.705827 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:21:47 crc kubenswrapper[4876]: E1215 10:21:47.707608 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:21:59 crc kubenswrapper[4876]: I1215 10:21:59.705949 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:21:59 crc kubenswrapper[4876]: E1215 10:21:59.708936 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:22:12 crc kubenswrapper[4876]: I1215 10:22:12.707135 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:22:12 crc kubenswrapper[4876]: E1215 10:22:12.707891 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.270970 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:18 crc kubenswrapper[4876]: E1215 10:22:18.272282 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="registry-server" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.272306 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="registry-server" Dec 15 10:22:18 crc kubenswrapper[4876]: E1215 10:22:18.272335 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="extract-content" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.272343 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="extract-content" Dec 15 10:22:18 crc kubenswrapper[4876]: E1215 10:22:18.272370 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="extract-utilities" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.272378 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="extract-utilities" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.272661 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f61bf7bd-db76-435c-8763-01a175971adf" containerName="registry-server" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.274726 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.301091 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.345348 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.345457 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw95k\" (UniqueName: \"kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.345570 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.446667 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.446792 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.446836 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw95k\" (UniqueName: \"kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.447210 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.447415 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.478133 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kw95k\" (UniqueName: \"kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k\") pod \"certified-operators-w98kl\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:18 crc kubenswrapper[4876]: I1215 10:22:18.605851 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:19 crc kubenswrapper[4876]: I1215 10:22:19.183271 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:19 crc kubenswrapper[4876]: I1215 10:22:19.199850 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerStarted","Data":"1a10bd34d039cfaddc2eb03560677b9fd6b733ce5bec571f4d79404f47827c3f"} Dec 15 10:22:20 crc kubenswrapper[4876]: I1215 10:22:20.211201 4876 generic.go:334] "Generic (PLEG): container finished" podID="4a1d8db0-f12e-455d-b436-f755535886d6" containerID="aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499" exitCode=0 Dec 15 10:22:20 crc kubenswrapper[4876]: I1215 10:22:20.211239 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerDied","Data":"aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499"} Dec 15 10:22:20 crc kubenswrapper[4876]: I1215 10:22:20.213435 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 10:22:22 crc kubenswrapper[4876]: I1215 10:22:22.276548 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerStarted","Data":"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f"} Dec 15 10:22:23 crc kubenswrapper[4876]: I1215 10:22:23.289829 4876 generic.go:334] "Generic (PLEG): container finished" podID="4a1d8db0-f12e-455d-b436-f755535886d6" containerID="5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f" exitCode=0 Dec 15 10:22:23 crc kubenswrapper[4876]: I1215 10:22:23.289880 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerDied","Data":"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f"} Dec 15 10:22:25 crc kubenswrapper[4876]: I1215 10:22:25.316574 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerStarted","Data":"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a"} Dec 15 10:22:25 crc kubenswrapper[4876]: I1215 10:22:25.342024 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w98kl" podStartSLOduration=3.079107234 podStartE2EDuration="7.341998203s" podCreationTimestamp="2025-12-15 10:22:18 +0000 UTC" firstStartedPulling="2025-12-15 10:22:20.213192859 +0000 UTC m=+12665.784335770" lastFinishedPulling="2025-12-15 10:22:24.476083828 +0000 UTC m=+12670.047226739" observedRunningTime="2025-12-15 10:22:25.339078485 +0000 UTC m=+12670.910221416" watchObservedRunningTime="2025-12-15 
10:22:25.341998203 +0000 UTC m=+12670.913141114" Dec 15 10:22:25 crc kubenswrapper[4876]: I1215 10:22:25.705566 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:22:25 crc kubenswrapper[4876]: E1215 10:22:25.705865 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:22:28 crc kubenswrapper[4876]: I1215 10:22:28.606076 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:28 crc kubenswrapper[4876]: I1215 10:22:28.606702 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:28 crc kubenswrapper[4876]: I1215 10:22:28.657628 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:29 crc kubenswrapper[4876]: I1215 10:22:29.396405 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:29 crc kubenswrapper[4876]: I1215 10:22:29.460037 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:31 crc kubenswrapper[4876]: I1215 10:22:31.374921 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-w98kl" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="registry-server" containerID="cri-o://a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a" gracePeriod=2 Dec 15 10:22:31 crc kubenswrapper[4876]: I1215 10:22:31.924855 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.021418 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content\") pod \"4a1d8db0-f12e-455d-b436-f755535886d6\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.021594 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw95k\" (UniqueName: \"kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k\") pod \"4a1d8db0-f12e-455d-b436-f755535886d6\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.021813 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities\") pod \"4a1d8db0-f12e-455d-b436-f755535886d6\" (UID: \"4a1d8db0-f12e-455d-b436-f755535886d6\") " Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.022406 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities" (OuterVolumeSpecName: "utilities") pod "4a1d8db0-f12e-455d-b436-f755535886d6" (UID: "4a1d8db0-f12e-455d-b436-f755535886d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.023033 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.026573 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k" (OuterVolumeSpecName: "kube-api-access-kw95k") pod "4a1d8db0-f12e-455d-b436-f755535886d6" (UID: "4a1d8db0-f12e-455d-b436-f755535886d6"). InnerVolumeSpecName "kube-api-access-kw95k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.073275 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a1d8db0-f12e-455d-b436-f755535886d6" (UID: "4a1d8db0-f12e-455d-b436-f755535886d6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.124578 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a1d8db0-f12e-455d-b436-f755535886d6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.124618 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw95k\" (UniqueName: \"kubernetes.io/projected/4a1d8db0-f12e-455d-b436-f755535886d6-kube-api-access-kw95k\") on node \"crc\" DevicePath \"\"" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.395460 4876 generic.go:334] "Generic (PLEG): container finished" podID="4a1d8db0-f12e-455d-b436-f755535886d6" containerID="a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a" exitCode=0 Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.395520 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerDied","Data":"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a"} Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.395559 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w98kl" event={"ID":"4a1d8db0-f12e-455d-b436-f755535886d6","Type":"ContainerDied","Data":"1a10bd34d039cfaddc2eb03560677b9fd6b733ce5bec571f4d79404f47827c3f"} Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.395580 4876 scope.go:117] "RemoveContainer" containerID="a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.395526 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-w98kl" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.442791 4876 scope.go:117] "RemoveContainer" containerID="5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.462995 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.479035 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-w98kl"] Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.499262 4876 scope.go:117] "RemoveContainer" containerID="aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.529115 4876 scope.go:117] "RemoveContainer" containerID="a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a" Dec 15 10:22:32 crc kubenswrapper[4876]: E1215 10:22:32.529754 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a\": container with ID starting with a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a not found: ID does not exist" containerID="a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.529835 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a"} err="failed to get container status \"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a\": rpc error: code = NotFound desc = could not find container \"a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a\": container with ID starting with a66a83edf4487162298f389ecbb6f135482342709041c6ac2eae4a472f218f1a not found: ID does not exist" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.529924 4876 scope.go:117] "RemoveContainer" containerID="5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f" Dec 15 10:22:32 crc kubenswrapper[4876]: E1215 10:22:32.530531 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f\": container with ID starting with 5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f not found: ID does not exist" containerID="5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.530588 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f"} err="failed to get container status \"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f\": rpc error: code = NotFound desc = could not find container \"5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f\": container with ID starting with 5035c4668af7bf09658107a71b6961ef9bf124e963732d3ea9fd7226efa38d9f not found: ID does not exist" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.530624 4876 scope.go:117] "RemoveContainer" containerID="aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499" Dec 15 10:22:32 crc kubenswrapper[4876]: E1215 10:22:32.531028 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499\": container with ID starting with aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499 not found: ID does not exist" containerID="aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.531060 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499"} err="failed to get container status \"aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499\": rpc error: code = NotFound desc = could not find container \"aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499\": container with ID starting with aeb2a8fa8dc4de8a46ef0f6fac2aa85297beda5b5a71ee64a60726e6db67b499 not found: ID does not exist" Dec 15 10:22:32 crc kubenswrapper[4876]: I1215 10:22:32.725002 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" path="/var/lib/kubelet/pods/4a1d8db0-f12e-455d-b436-f755535886d6/volumes" Dec 15 10:22:38 crc kubenswrapper[4876]: I1215 10:22:38.706518 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:22:38 crc kubenswrapper[4876]: E1215 10:22:38.707056 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:22:49 crc kubenswrapper[4876]: I1215 10:22:49.706172 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:22:49 crc kubenswrapper[4876]: E1215 10:22:49.706926 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.920157 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:22:58 crc kubenswrapper[4876]: E1215 10:22:58.921184 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="extract-content" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.921203 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="extract-content" Dec 15 10:22:58 crc kubenswrapper[4876]: E1215 10:22:58.921230 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="extract-utilities" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.921239 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="extract-utilities" Dec 15 10:22:58 crc kubenswrapper[4876]: E1215 
10:22:58.921268 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="registry-server" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.921276 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="registry-server" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.921503 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a1d8db0-f12e-455d-b436-f755535886d6" containerName="registry-server" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.923077 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:58 crc kubenswrapper[4876]: I1215 10:22:58.934546 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.081630 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxxwd\" (UniqueName: \"kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.081710 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.081741 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.183628 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxxwd\" (UniqueName: \"kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.183689 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.183719 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.184246 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.184473 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.215024 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxxwd\" (UniqueName: \"kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd\") pod \"community-operators-sm5rx\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.250018 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:22:59 crc kubenswrapper[4876]: I1215 10:22:59.806477 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:23:00 crc kubenswrapper[4876]: I1215 10:23:00.720684 4876 generic.go:334] "Generic (PLEG): container finished" podID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerID="fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4" exitCode=0 Dec 15 10:23:00 crc kubenswrapper[4876]: I1215 10:23:00.720740 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerDied","Data":"fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4"} Dec 15 10:23:00 crc kubenswrapper[4876]: I1215 10:23:00.721073 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerStarted","Data":"b696ce5700d9ac00c5130616cfc94696bd2e4e9dda879c7e3ba9f42127570b84"} Dec 15 10:23:02 crc kubenswrapper[4876]: I1215 10:23:02.741773 4876 generic.go:334] "Generic (PLEG): container finished" podID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerID="79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914" exitCode=0 Dec 15 10:23:02 crc kubenswrapper[4876]: I1215 10:23:02.741855 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerDied","Data":"79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914"} Dec 15 10:23:03 crc kubenswrapper[4876]: I1215 10:23:03.755257 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerStarted","Data":"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008"} Dec 15 10:23:03 crc kubenswrapper[4876]: I1215 10:23:03.777818 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sm5rx" podStartSLOduration=3.021562031 podStartE2EDuration="5.777797868s" podCreationTimestamp="2025-12-15 10:22:58 +0000 UTC" firstStartedPulling="2025-12-15 10:23:00.723005207 +0000 UTC 
m=+12706.294148128" lastFinishedPulling="2025-12-15 10:23:03.479241054 +0000 UTC m=+12709.050383965" observedRunningTime="2025-12-15 10:23:03.773042774 +0000 UTC m=+12709.344185705" watchObservedRunningTime="2025-12-15 10:23:03.777797868 +0000 UTC m=+12709.348940779" Dec 15 10:23:04 crc kubenswrapper[4876]: I1215 10:23:04.713021 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:23:04 crc kubenswrapper[4876]: E1215 10:23:04.713549 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:23:09 crc kubenswrapper[4876]: I1215 10:23:09.250516 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:09 crc kubenswrapper[4876]: I1215 10:23:09.251170 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:09 crc kubenswrapper[4876]: I1215 10:23:09.299615 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:09 crc kubenswrapper[4876]: I1215 10:23:09.860145 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:09 crc kubenswrapper[4876]: I1215 10:23:09.914566 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:23:11 crc kubenswrapper[4876]: I1215 10:23:11.835073 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sm5rx" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="registry-server" containerID="cri-o://68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008" gracePeriod=2 Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.370469 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.480861 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxxwd\" (UniqueName: \"kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd\") pod \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.481285 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities\") pod \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.481589 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content\") pod \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\" (UID: \"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67\") " Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.482908 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities" (OuterVolumeSpecName: "utilities") pod "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" (UID: "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.495493 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd" (OuterVolumeSpecName: "kube-api-access-mxxwd") pod "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" (UID: "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67"). InnerVolumeSpecName "kube-api-access-mxxwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.584773 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxxwd\" (UniqueName: \"kubernetes.io/projected/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-kube-api-access-mxxwd\") on node \"crc\" DevicePath \"\"" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.585031 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.626630 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" (UID: "6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.687506 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.847922 4876 generic.go:334] "Generic (PLEG): container finished" podID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerID="68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008" exitCode=0 Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.847977 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerDied","Data":"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008"} Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.847984 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sm5rx" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.848013 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sm5rx" event={"ID":"6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67","Type":"ContainerDied","Data":"b696ce5700d9ac00c5130616cfc94696bd2e4e9dda879c7e3ba9f42127570b84"} Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.848038 4876 scope.go:117] "RemoveContainer" containerID="68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.878226 4876 scope.go:117] "RemoveContainer" containerID="79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.878750 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.899124 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sm5rx"] Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.902305 4876 scope.go:117] "RemoveContainer" containerID="fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.965314 4876 scope.go:117] "RemoveContainer" containerID="68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008" Dec 15 10:23:12 crc kubenswrapper[4876]: E1215 10:23:12.965759 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008\": container with ID starting with 68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008 not found: ID does not exist" containerID="68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.965805 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008"} err="failed to get container status \"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008\": rpc error: code = NotFound desc = could not find container \"68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008\": container with ID starting with 68bc7e65a51bf27f29cdd69b11135bbdd83fd40af1f59f6189a817150d475008 not found: ID does not exist" Dec 15 
10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.965829 4876 scope.go:117] "RemoveContainer" containerID="79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914" Dec 15 10:23:12 crc kubenswrapper[4876]: E1215 10:23:12.966038 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914\": container with ID starting with 79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914 not found: ID does not exist" containerID="79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.966065 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914"} err="failed to get container status \"79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914\": rpc error: code = NotFound desc = could not find container \"79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914\": container with ID starting with 79fe7d6a6c0a855a14e475a3c498fb192fcaed9c8fb969059ac88929026ca914 not found: ID does not exist" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.966078 4876 scope.go:117] "RemoveContainer" containerID="fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4" Dec 15 10:23:12 crc kubenswrapper[4876]: E1215 10:23:12.966330 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4\": container with ID starting with fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4 not found: ID does not exist" containerID="fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4" Dec 15 10:23:12 crc kubenswrapper[4876]: I1215 10:23:12.966361 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4"} err="failed to get container status \"fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4\": rpc error: code = NotFound desc = could not find container \"fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4\": container with ID starting with fc4a0446adff28805578f6a4a6166e9433f7c1d2f1f97afb035f21fcc4baada4 not found: ID does not exist" Dec 15 10:23:14 crc kubenswrapper[4876]: I1215 10:23:14.756172 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" path="/var/lib/kubelet/pods/6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67/volumes" Dec 15 10:23:17 crc kubenswrapper[4876]: I1215 10:23:17.706036 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:23:17 crc kubenswrapper[4876]: E1215 10:23:17.707085 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:23:30 crc kubenswrapper[4876]: I1215 10:23:30.706886 4876 scope.go:117] "RemoveContainer" 
containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:23:30 crc kubenswrapper[4876]: E1215 10:23:30.708584 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:23:42 crc kubenswrapper[4876]: I1215 10:23:42.706432 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:23:42 crc kubenswrapper[4876]: E1215 10:23:42.707655 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:23:57 crc kubenswrapper[4876]: I1215 10:23:57.706138 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:23:57 crc kubenswrapper[4876]: E1215 10:23:57.707728 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:24:11 crc kubenswrapper[4876]: I1215 10:24:11.706345 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:24:11 crc kubenswrapper[4876]: E1215 10:24:11.707196 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.412449 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_b6f1b67c-1f7c-4748-8530-cf99c3859e6d/init-config-reloader/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.564560 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_b6f1b67c-1f7c-4748-8530-cf99c3859e6d/init-config-reloader/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.581372 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_b6f1b67c-1f7c-4748-8530-cf99c3859e6d/alertmanager/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.618923 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_b6f1b67c-1f7c-4748-8530-cf99c3859e6d/config-reloader/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 
10:24:17.772193 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_dcd2c908-a394-4285-a770-7d933f4491f2/aodh-api/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.835343 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_dcd2c908-a394-4285-a770-7d933f4491f2/aodh-listener/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.855032 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_dcd2c908-a394-4285-a770-7d933f4491f2/aodh-evaluator/0.log" Dec 15 10:24:17 crc kubenswrapper[4876]: I1215 10:24:17.926268 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_dcd2c908-a394-4285-a770-7d933f4491f2/aodh-notifier/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.050020 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-844649956b-xbt9v_c92d69ad-8051-4a8d-a271-9af1d845ae73/barbican-api/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.059496 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-844649956b-xbt9v_c92d69ad-8051-4a8d-a271-9af1d845ae73/barbican-api-log/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.213075 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57f9dd55d6-zbz7c_14199da7-b1e2-4729-8a0c-0b94c7166885/barbican-keystone-listener/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.406374 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-c776c4f89-jznbk_fc70e06c-5856-4e75-b2d4-8560e7a443d8/barbican-worker/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.436035 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-c776c4f89-jznbk_fc70e06c-5856-4e75-b2d4-8560e7a443d8/barbican-worker-log/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.781954 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-m2xj6_c96ac8f3-3c83-4e9d-a9f1-e7855a9ad9a2/bootstrap-openstack-openstack-cell1/0.log" Dec 15 10:24:18 crc kubenswrapper[4876]: I1215 10:24:18.959475 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-networker-wjdpb_318cc9be-d461-48c7-83ee-f2a3cfe88b08/bootstrap-openstack-openstack-networker/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.095874 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3244439e-cc67-4e8d-abc0-1907df4aa1c7/ceilometer-central-agent/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.144502 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-57f9dd55d6-zbz7c_14199da7-b1e2-4729-8a0c-0b94c7166885/barbican-keystone-listener-log/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.232921 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3244439e-cc67-4e8d-abc0-1907df4aa1c7/ceilometer-notification-agent/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.244414 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_3244439e-cc67-4e8d-abc0-1907df4aa1c7/proxy-httpd/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.303774 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_3244439e-cc67-4e8d-abc0-1907df4aa1c7/sg-core/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.458561 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-tmvmx_92849ab3-8a5c-48e4-9426-7a5e49371db3/ceph-client-openstack-openstack-cell1/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.733092 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2d55cdb6-397e-475e-9e07-6a6ab4b4342d/cinder-api-log/0.log" Dec 15 10:24:19 crc kubenswrapper[4876]: I1215 10:24:19.914156 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2d55cdb6-397e-475e-9e07-6a6ab4b4342d/cinder-api/0.log" Dec 15 10:24:20 crc kubenswrapper[4876]: I1215 10:24:20.078519 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_883b9aed-2112-44d2-8698-3446934a0c21/probe/0.log" Dec 15 10:24:20 crc kubenswrapper[4876]: I1215 10:24:20.201906 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b2c2c29-aa94-4339-b6b7-95ee7f48dd73/cinder-scheduler/0.log" Dec 15 10:24:20 crc kubenswrapper[4876]: I1215 10:24:20.545333 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_3b2c2c29-aa94-4339-b6b7-95ee7f48dd73/probe/0.log" Dec 15 10:24:20 crc kubenswrapper[4876]: I1215 10:24:20.833461 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_1c5a822f-3b02-41d0-b70e-2254bef1ea34/probe/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.077015 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-hw86n_f569b737-56ff-4e5e-af32-807e3a8ab69a/configure-network-openstack-openstack-cell1/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.305489 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_883b9aed-2112-44d2-8698-3446934a0c21/cinder-backup/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.367746 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-networker-sjzvd_08da1b90-b47e-4073-92df-d3eb34b32ac1/configure-network-openstack-openstack-networker/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.635430 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-cvdtp_fb76a082-a81a-498d-9be3-3af9e9b5b01e/configure-os-openstack-openstack-cell1/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.717932 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-networker-xvx2j_88c5fd56-5e02-4dfc-a89e-06fa1e22f297/configure-os-openstack-openstack-networker/0.log" Dec 15 10:24:21 crc kubenswrapper[4876]: I1215 10:24:21.894032 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5694c8549f-nhlb9_67dfe15a-c41b-46c1-bc0a-0a089a4cfabd/init/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.131167 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5694c8549f-nhlb9_67dfe15a-c41b-46c1-bc0a-0a089a4cfabd/init/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.196247 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-799z9_ca266618-8aff-4574-8a86-d987f671b431/download-cache-openstack-openstack-cell1/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.314578 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5694c8549f-nhlb9_67dfe15a-c41b-46c1-bc0a-0a089a4cfabd/dnsmasq-dns/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.430092 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-networker-2f7nm_14a2c5ff-3623-4e41-9c5e-43f880d1f7bf/download-cache-openstack-openstack-networker/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.686561 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7067c227-f2bf-4b06-975c-9b66655d1d2c/glance-httpd/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.736929 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_7067c227-f2bf-4b06-975c-9b66655d1d2c/glance-log/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.894873 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_487e865a-734d-433a-8ecd-8c2839afe8db/glance-httpd/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.930931 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_487e865a-734d-433a-8ecd-8c2839afe8db/glance-log/0.log" Dec 15 10:24:22 crc kubenswrapper[4876]: I1215 10:24:22.945873 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_1c5a822f-3b02-41d0-b70e-2254bef1ea34/cinder-volume/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.165268 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-5f59b99c48-hfp75_e02b5464-466b-4b2c-8b17-6f95eb820c4c/heat-api/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.270313 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-75495b79b7-z2zxg_baa6ee5f-7b6e-479f-882a-68ed0f9c1a23/heat-engine/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.329391 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-6dd84dfc-l68t4_4fdccad6-fd2e-4b94-a2ad-6292ccfb2312/heat-cfnapi/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.529549 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-78569d8b45-bcs6r_0067ba26-263c-4df8-8249-74e9b72509ee/horizon/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.627357 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-4v4dc_3656635c-619e-44de-8647-e7d82f850931/install-certs-openstack-openstack-cell1/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.659042 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-78569d8b45-bcs6r_0067ba26-263c-4df8-8249-74e9b72509ee/horizon-log/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.782635 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-networker-ll6qb_8f5d7261-64ac-49b8-a081-34c57e792c7b/install-certs-openstack-openstack-networker/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.938820 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-hgcsp_3df0942a-900c-413a-9cde-671328743f83/install-os-openstack-openstack-cell1/0.log" Dec 15 10:24:23 crc kubenswrapper[4876]: I1215 10:24:23.990165 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-networker-vwk9m_79b4891a-c87e-4cbf-a506-c1dce22f2555/install-os-openstack-openstack-networker/0.log" Dec 15 10:24:24 crc kubenswrapper[4876]: I1215 10:24:24.184321 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29429821-l6qcz_62fa6354-9ea9-4586-814b-6f4e9dd53da2/keystone-cron/0.log" Dec 15 10:24:24 crc kubenswrapper[4876]: I1215 10:24:24.360677 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29429881-6jcgx_866c0912-2922-4832-8398-4953a10b5661/keystone-cron/0.log" Dec 15 10:24:24 crc kubenswrapper[4876]: I1215 10:24:24.671633 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7188e583-ec09-4936-b3bd-e66a6f0c7c3e/kube-state-metrics/0.log" Dec 15 10:24:24 crc kubenswrapper[4876]: I1215 10:24:24.712821 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:24:24 crc kubenswrapper[4876]: E1215 10:24:24.713192 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:24:24 crc kubenswrapper[4876]: I1215 10:24:24.830908 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-bxvgp_9440ef3b-7ac3-4e2d-b34b-fd11e44b9134/libvirt-openstack-openstack-cell1/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.436060 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_48244660-aaad-48db-ba7a-ae76adc56672/manila-scheduler/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.441818 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_48244660-aaad-48db-ba7a-ae76adc56672/probe/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.493511 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-9776897b9-wfbwj_d10b248e-69de-4f99-9e4b-afb3c7c1e53e/keystone-api/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.558745 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_cc3003f4-6959-4f1e-8c14-9a17f56e5112/manila-api/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.707277 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_cc3003f4-6959-4f1e-8c14-9a17f56e5112/manila-api-log/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.764992 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_9db8fee6-4ebb-40cf-a695-03c92d3936d6/manila-share/0.log" Dec 15 10:24:25 crc kubenswrapper[4876]: I1215 10:24:25.822204 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_9db8fee6-4ebb-40cf-a695-03c92d3936d6/probe/0.log" Dec 15 10:24:26 crc kubenswrapper[4876]: I1215 
10:24:26.385024 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-tk9mr_69d2e77c-a14c-43ab-ac11-90cd6e7808f5/neutron-dhcp-openstack-openstack-cell1/0.log" Dec 15 10:24:26 crc kubenswrapper[4876]: I1215 10:24:26.489199 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6799547699-m5p7w_cdafcf42-6822-412c-8511-d6b45c9e6a62/neutron-httpd/0.log" Dec 15 10:24:26 crc kubenswrapper[4876]: I1215 10:24:26.729559 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-xrzd5_75af2aac-a703-42f7-a933-4f2bac4af01f/neutron-metadata-openstack-openstack-cell1/0.log" Dec 15 10:24:26 crc kubenswrapper[4876]: I1215 10:24:26.915658 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-networker-t2m59_946096c8-3935-4428-ad25-7c10e755784d/neutron-metadata-openstack-openstack-networker/0.log" Dec 15 10:24:26 crc kubenswrapper[4876]: I1215 10:24:26.959533 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6799547699-m5p7w_cdafcf42-6822-412c-8511-d6b45c9e6a62/neutron-api/0.log" Dec 15 10:24:27 crc kubenswrapper[4876]: I1215 10:24:27.050384 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-78w4n_b53427f3-ee4e-41ab-ab2c-bfae9d73dc23/neutron-sriov-openstack-openstack-cell1/0.log" Dec 15 10:24:27 crc kubenswrapper[4876]: I1215 10:24:27.560137 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_88581f4e-4f4d-42e8-a59f-5bd7385d9485/nova-api-api/0.log" Dec 15 10:24:27 crc kubenswrapper[4876]: I1215 10:24:27.581005 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_51c89021-f333-443b-bbaa-d0c236ce716b/nova-cell0-conductor-conductor/0.log" Dec 15 10:24:27 crc kubenswrapper[4876]: I1215 10:24:27.709935 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_88581f4e-4f4d-42e8-a59f-5bd7385d9485/nova-api-log/0.log" Dec 15 10:24:27 crc kubenswrapper[4876]: I1215 10:24:27.936289 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_ce9bf6a7-85ad-46ef-bf55-0e48140b4abf/nova-cell1-conductor-conductor/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.003912 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_5bad51e1-7309-47a0-8d07-e3f4d9bbd104/nova-cell1-novncproxy-novncproxy/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.272781 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell2x97h_dbb71641-2cea-47e7-af95-5dfe1de074d2/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.405389 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-gxkq7_37dbdd47-81c0-4e73-b82c-7e8ceb930cc3/nova-cell1-openstack-openstack-cell1/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.543296 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_e0e287f5-1611-44e0-bc10-8ca467a89dbe/nova-metadata-log/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.644300 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-metadata-0_e0e287f5-1611-44e0-bc10-8ca467a89dbe/nova-metadata-metadata/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.815776 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_21066d50-c54c-4d8d-8b53-84c8e8abbeb0/nova-scheduler-scheduler/0.log" Dec 15 10:24:28 crc kubenswrapper[4876]: I1215 10:24:28.900213 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e90da418-4119-4738-85b2-58075f0eac3a/mysql-bootstrap/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.128240 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e90da418-4119-4738-85b2-58075f0eac3a/mysql-bootstrap/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.137260 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_e90da418-4119-4738-85b2-58075f0eac3a/galera/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.198906 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ebd41608-1877-4f89-8a43-1d902a246616/mysql-bootstrap/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.430164 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ebd41608-1877-4f89-8a43-1d902a246616/mysql-bootstrap/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.469697 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ebd41608-1877-4f89-8a43-1d902a246616/galera/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.472915 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b7f9b10e-34c0-4e6b-9732-ca34348c4eac/openstackclient/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.691304 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8/openstack-network-exporter/0.log" Dec 15 10:24:29 crc kubenswrapper[4876]: I1215 10:24:29.753028 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_48c6e7c8-91ea-46c5-8ee8-1137c2ce87f8/ovn-northd/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.037371 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-ls542_4ef9be2e-a2e2-4004-b6f9-7872140a2422/ovn-openstack-openstack-cell1/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.230566 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b6826c99-fe6e-4280-ae05-f5f6794d2f74/openstack-network-exporter/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.278811 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b6826c99-fe6e-4280-ae05-f5f6794d2f74/ovsdbserver-nb/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.306990 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-networker-9lmb2_1dab852d-2b8c-4343-9bde-ea3218464e8f/ovn-openstack-openstack-networker/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.503055 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_d76c52a3-b9f7-4807-9642-fa62750a87f3/ovsdbserver-nb/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.543802 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-1_d76c52a3-b9f7-4807-9642-fa62750a87f3/openstack-network-exporter/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.712645 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_cf9e91ec-2a1f-450f-8736-9d5f50335754/openstack-network-exporter/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.728722 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_cf9e91ec-2a1f-450f-8736-9d5f50335754/ovsdbserver-nb/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.847031 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c02a6599-0085-4e35-8bad-f9512ef7ef42/openstack-network-exporter/0.log" Dec 15 10:24:30 crc kubenswrapper[4876]: I1215 10:24:30.958255 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c02a6599-0085-4e35-8bad-f9512ef7ef42/ovsdbserver-sb/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.018961 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_afb0565c-3624-4f18-bc08-157348daa031/openstack-network-exporter/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.099478 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_afb0565c-3624-4f18-bc08-157348daa031/ovsdbserver-sb/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.278439 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_b0df166e-bbcc-4954-88a7-73ecc378bcfa/openstack-network-exporter/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.285425 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_b0df166e-bbcc-4954-88a7-73ecc378bcfa/ovsdbserver-sb/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.632862 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cqkcnr_c38d67c8-ae96-4f07-a552-413a21e47d80/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.708699 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84f5ccbfbd-njqxx_f5463000-4fbc-4eb1-9c34-50270510d74e/placement-api/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.814398 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84f5ccbfbd-njqxx_f5463000-4fbc-4eb1-9c34-50270510d74e/placement-log/0.log" Dec 15 10:24:31 crc kubenswrapper[4876]: I1215 10:24:31.863800 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-ngn58r_c1ca81a1-767a-4f98-93a1-0f04472a134c/pre-adoption-validation-openstack-pre-adoption-openstack-networ/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.097186 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e/init-config-reloader/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.233128 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e/init-config-reloader/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.274086 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e/config-reloader/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.277146 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e/prometheus/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.319917 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_6821bc38-0d6b-4c4d-aa4a-6b7da625ba3e/thanos-sidecar/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.487584 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0cb0b72-61fd-4427-ac5d-c91e21cac028/setup-container/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.767961 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0cb0b72-61fd-4427-ac5d-c91e21cac028/setup-container/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.801526 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4d08f1c6-5e22-447b-8ba4-0205b37e47eb/setup-container/0.log" Dec 15 10:24:32 crc kubenswrapper[4876]: I1215 10:24:32.816357 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c0cb0b72-61fd-4427-ac5d-c91e21cac028/rabbitmq/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.029572 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4d08f1c6-5e22-447b-8ba4-0205b37e47eb/setup-container/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.162521 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-kfc9n_5f149f08-66bd-4f27-92f8-d2c23cce8a1e/reboot-os-openstack-openstack-cell1/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.309992 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_4d08f1c6-5e22-447b-8ba4-0205b37e47eb/rabbitmq/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.393272 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-networker-q9k9q_9c9ecea0-529d-4c59-b9ce-b822f44b99d8/reboot-os-openstack-openstack-networker/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.488970 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-gzsfz_0d0d7bdc-e4ee-4add-8c5e-af6695fdda7a/run-os-openstack-openstack-cell1/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.630847 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-networker-qc76v_ba1fd6d1-ba1e-4b71-ae44-88c90eebc978/run-os-openstack-openstack-networker/0.log" Dec 15 10:24:33 crc kubenswrapper[4876]: I1215 10:24:33.850944 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-vmx88_f8ae1854-b545-42e7-acbf-c64fc343ae54/ssh-known-hosts-openstack/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.226704 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-nb9mg_bad1d359-4c34-4059-9eb0-f2f77d5bf834/telemetry-openstack-openstack-cell1/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.238218 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_e81a48d9-360b-467f-9756-d0bfd775042e/test-operator-logs-container/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.291128 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_7e4c7322-1b07-4628-85ab-5f72f6f44e04/tempest-tests-tempest-tests-runner/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.767636 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-2g279_68234414-ac2d-447e-a5c5-4608dc77b5ed/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.816336 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-54wdc_20dc7bae-3650-4be1-af27-121f8ace7a4b/validate-network-openstack-openstack-cell1/0.log" Dec 15 10:24:34 crc kubenswrapper[4876]: I1215 10:24:34.881542 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-networker-gvwwt_dfb2bbb4-3808-4c06-83d4-12f17922a1e8/tripleo-cleanup-tripleo-cleanup-openstack-networker/0.log" Dec 15 10:24:35 crc kubenswrapper[4876]: I1215 10:24:35.071947 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-networker-nq5hm_a8a4628a-a631-4db9-bdd5-7040f07f868c/validate-network-openstack-openstack-networker/0.log" Dec 15 10:24:39 crc kubenswrapper[4876]: I1215 10:24:39.706154 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:24:39 crc kubenswrapper[4876]: E1215 10:24:39.707041 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:24:49 crc kubenswrapper[4876]: I1215 10:24:49.878364 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_b3b4579c-ee28-4f00-b7da-d8f775335c21/memcached/0.log" Dec 15 10:24:51 crc kubenswrapper[4876]: I1215 10:24:51.706169 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:24:51 crc kubenswrapper[4876]: E1215 10:24:51.706751 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.269914 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-95949466-kdbsr_cbcc0e4f-8e39-4602-9c7a-512ff7ed2b87/manager/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.454260 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-5cf45c46bd-djwl2_4b7cafd3-81e2-419f-879e-5761126af781/manager/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.526558 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-66f8b87655-fjmj4_7bc70792-76af-4d00-a2b2-2fdd6b89be16/manager/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.645343 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/util/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.706186 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:25:02 crc kubenswrapper[4876]: E1215 10:25:02.707625 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.871601 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/pull/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.900600 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/pull/0.log" Dec 15 10:25:02 crc kubenswrapper[4876]: I1215 10:25:02.906677 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/util/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.087284 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/pull/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.093576 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/util/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.134278 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ea5d71784a81c95dff2031a02f0a0b3f756f86f14acad8f152d938f56fn4l2b_b41a0796-453e-4d66-81c7-5d7c44def86a/extract/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.401962 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-59b8dcb766-8k4s2_c9c3edf2-0545-4a2f-9b53-53317c8c028b/manager/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.584311 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-767f9d7567-tmckd_6f2624c6-7e76-4413-b586-246d5cab2346/manager/0.log" Dec 15 10:25:03 crc kubenswrapper[4876]: I1215 10:25:03.604173 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6ccf486b9-ndjnl_7bcbfa3a-5871-44db-b07b-2687a995b87f/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.017795 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-f458558d7-pcz5j_70e313c3-63de-489b-bf09-6072d26ad862/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.215950 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-58944d7758-m5r6k_ab87f935-876b-42b5-9eb9-85092cd8068f/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.318023 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5c7cbf548f-454ft_ff12b926-5daa-4bd3-b49a-154d80442fa8/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.320403 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5fdd9786f7-b6nw6_f4c838cf-5a9f-445d-b43b-788ced3037cf/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.486897 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f76f4954c-7qzqq_cb583ab2-3f9a-42e3-afd3-630dd25eb152/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.673808 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7cd87b778f-v56p5_f589b271-7169-4562-80b9-35f85f4ecfec/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.989358 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-68c649d9d-2mxzz_85c28239-78ab-4206-b204-8fa54acf2968/manager/0.log" Dec 15 10:25:04 crc kubenswrapper[4876]: I1215 10:25:04.989691 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5fbbf8b6cc-9mmq8_a590cf12-4e90-44fe-be6c-b392c7e22208/manager/0.log" Dec 15 10:25:05 crc kubenswrapper[4876]: I1215 10:25:05.893613 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-689f887b54x442j_e7ea8ece-c3d4-46bb-9255-6021533983c4/manager/0.log" Dec 15 10:25:06 crc kubenswrapper[4876]: I1215 10:25:06.051767 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-57bbbf4567-v9z5s_b53f15be-f457-4006-9bc5-ad33291b58d5/operator/0.log" Dec 15 10:25:06 crc kubenswrapper[4876]: I1215 10:25:06.414720 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-gr7bv_65ad3502-fd7b-40e9-9d7c-8512f175e63e/registry-server/0.log" Dec 15 10:25:06 crc kubenswrapper[4876]: I1215 10:25:06.474826 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-bf6d4f946-dh78v_94aa3357-932f-4a07-8c32-32b357938142/manager/0.log" Dec 15 10:25:06 crc kubenswrapper[4876]: I1215 10:25:06.716835 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-8665b56d78-8br7q_5a8894cb-9bb5-403e-9003-28e8f771f142/manager/0.log" Dec 15 10:25:06 crc kubenswrapper[4876]: I1215 10:25:06.791808 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-xmt78_6d4d5d32-5a7d-454e-b86d-2513e278a9d5/operator/0.log" Dec 15 10:25:07 crc kubenswrapper[4876]: I1215 10:25:07.058704 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5c6df8f9-cj8nd_970d703f-35b5-41d2-b2b6-fae4c2fba825/manager/0.log" Dec 15 10:25:07 crc kubenswrapper[4876]: I1215 10:25:07.288921 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-756ccf86c7-htkc7_ce1ac899-0b49-4d41-b5a6-3272ef25023a/manager/0.log" Dec 15 10:25:07 crc kubenswrapper[4876]: I1215 10:25:07.353760 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-97d456b9-8vn7l_f43fd495-2325-4fc5-853a-2289e6398417/manager/0.log" Dec 15 10:25:07 crc kubenswrapper[4876]: I1215 10:25:07.471629 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-55f78b7c4c-vm8xh_2af1ad25-d6c3-402a-bbe2-78e0a900a6ca/manager/0.log" Dec 15 10:25:08 crc kubenswrapper[4876]: I1215 10:25:08.990667 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-56f6fbdf6-824mz_3ca69c11-8ce1-4e8c-8e27-e4d70de4d8c8/manager/0.log" Dec 15 10:25:14 crc kubenswrapper[4876]: I1215 10:25:14.749909 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:25:14 crc kubenswrapper[4876]: E1215 10:25:14.750735 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:26 crc kubenswrapper[4876]: I1215 10:25:26.706458 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:25:26 crc kubenswrapper[4876]: E1215 10:25:26.709681 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:28 crc kubenswrapper[4876]: I1215 10:25:28.178817 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-kk5bp_bfe06425-73ac-4753-bdba-7d1f1797b4c6/control-plane-machine-set-operator/0.log" Dec 15 10:25:28 crc kubenswrapper[4876]: I1215 10:25:28.627116 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fztl2_c6defe56-040f-4e05-9b36-28b7d70481df/kube-rbac-proxy/0.log" Dec 15 10:25:28 crc kubenswrapper[4876]: I1215 10:25:28.637397 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fztl2_c6defe56-040f-4e05-9b36-28b7d70481df/machine-api-operator/0.log" Dec 15 10:25:37 crc 
kubenswrapper[4876]: I1215 10:25:37.705893 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:25:37 crc kubenswrapper[4876]: E1215 10:25:37.706875 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:42 crc kubenswrapper[4876]: I1215 10:25:42.054709 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-qtlm8_0800dd6f-a26c-46bd-86a0-f2280aee8f0e/cert-manager-cainjector/0.log" Dec 15 10:25:42 crc kubenswrapper[4876]: I1215 10:25:42.082426 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-dlnr8_e9476c47-ae65-4abd-a748-87dddd55797d/cert-manager-controller/0.log" Dec 15 10:25:42 crc kubenswrapper[4876]: I1215 10:25:42.125546 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-v4w4z_593d5e06-4f72-483e-ad51-5d2ac67055b3/cert-manager-webhook/0.log" Dec 15 10:25:49 crc kubenswrapper[4876]: I1215 10:25:49.706000 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:25:49 crc kubenswrapper[4876]: E1215 10:25:49.706883 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.202765 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-8fmvx_c28e7727-e87a-4dd3-ac79-1d737ca00132/nmstate-console-plugin/0.log" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.333991 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zhr5r_b9207aa3-de53-4d87-99fe-40c057111a80/nmstate-handler/0.log" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.463635 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-r88rl_e3bf6a5a-3f30-406f-87d7-f01388bb4210/nmstate-metrics/0.log" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.485428 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-r88rl_e3bf6a5a-3f30-406f-87d7-f01388bb4210/kube-rbac-proxy/0.log" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.644094 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-sww9s_cd43d415-d5f1-47dc-be4a-cf2a0a4c6053/nmstate-operator/0.log" Dec 15 10:25:55 crc kubenswrapper[4876]: I1215 10:25:55.705724 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-hxr68_56060de2-24fe-4d23-8332-4af3fe7c4c74/nmstate-webhook/0.log" Dec 15 10:26:04 crc kubenswrapper[4876]: I1215 10:26:04.714846 4876 
scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:26:05 crc kubenswrapper[4876]: I1215 10:26:05.666036 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51"} Dec 15 10:26:10 crc kubenswrapper[4876]: I1215 10:26:10.767203 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-9r84l_cfac572a-3220-4297-9706-d8a681d42852/kube-rbac-proxy/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.070473 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-9r84l_cfac572a-3220-4297-9706-d8a681d42852/controller/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.189393 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-frr-files/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.318796 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-frr-files/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.385124 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-reloader/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.412926 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-metrics/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.455201 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-reloader/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.635413 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-reloader/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.667282 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-metrics/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.676249 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-metrics/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.700120 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-frr-files/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.889004 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-frr-files/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.903597 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-reloader/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.914538 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/cp-metrics/0.log" Dec 15 10:26:11 crc kubenswrapper[4876]: I1215 10:26:11.962383 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/controller/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.121831 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/frr-metrics/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.136174 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/kube-rbac-proxy/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.233442 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/kube-rbac-proxy-frr/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.396013 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/reloader/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.440293 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-rjzg2_704fe5a0-3c3f-4525-a1fc-6f4a5512e6e0/frr-k8s-webhook-server/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.723979 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79c8fdf8b5-hxccn_e94fa05e-79a3-43ac-94c5-55fe38c57a17/manager/0.log" Dec 15 10:26:12 crc kubenswrapper[4876]: I1215 10:26:12.996203 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6687c58c98-v52ns_68e442ac-dc60-4346-a861-102a4dcc5c26/webhook-server/0.log" Dec 15 10:26:13 crc kubenswrapper[4876]: I1215 10:26:13.042835 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-c5kqb_cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24/kube-rbac-proxy/0.log" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.247282 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-c5kqb_cb8e7bb2-4d2a-4b1e-9c13-6e43c4ed6e24/speaker/0.log" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.412828 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:14 crc kubenswrapper[4876]: E1215 10:26:14.413386 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="extract-utilities" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.413409 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="extract-utilities" Dec 15 10:26:14 crc kubenswrapper[4876]: E1215 10:26:14.413437 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="extract-content" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.413444 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="extract-content" Dec 15 10:26:14 crc kubenswrapper[4876]: E1215 10:26:14.413462 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="registry-server" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.413468 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="registry-server" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.413712 
4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8c7b04-04e2-4c0e-b21f-b8e8cbcd4f67" containerName="registry-server" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.427624 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.458710 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.525338 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.525408 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjnvk\" (UniqueName: \"kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.525575 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.627155 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.627249 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjnvk\" (UniqueName: \"kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.627305 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.627958 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.627968 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities\") pod 
\"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.662550 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjnvk\" (UniqueName: \"kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk\") pod \"redhat-operators-pcbtr\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:14 crc kubenswrapper[4876]: I1215 10:26:14.750548 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:15 crc kubenswrapper[4876]: I1215 10:26:15.269707 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:15 crc kubenswrapper[4876]: I1215 10:26:15.787582 4876 generic.go:334] "Generic (PLEG): container finished" podID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerID="4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c" exitCode=0 Dec 15 10:26:15 crc kubenswrapper[4876]: I1215 10:26:15.787872 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerDied","Data":"4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c"} Dec 15 10:26:15 crc kubenswrapper[4876]: I1215 10:26:15.787979 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerStarted","Data":"e5b1f41423d1edcc22fddd5a704f2a649b664a4839408851c02377c4b9ec36ff"} Dec 15 10:26:16 crc kubenswrapper[4876]: I1215 10:26:16.131519 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-hmmjw_aaab65ef-e4a8-4bd8-b9ed-3d6fc826ee6b/frr/0.log" Dec 15 10:26:17 crc kubenswrapper[4876]: I1215 10:26:17.808595 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerStarted","Data":"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69"} Dec 15 10:26:20 crc kubenswrapper[4876]: I1215 10:26:20.839277 4876 generic.go:334] "Generic (PLEG): container finished" podID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerID="24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69" exitCode=0 Dec 15 10:26:20 crc kubenswrapper[4876]: I1215 10:26:20.839340 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerDied","Data":"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69"} Dec 15 10:26:21 crc kubenswrapper[4876]: I1215 10:26:21.849315 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerStarted","Data":"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5"} Dec 15 10:26:21 crc kubenswrapper[4876]: I1215 10:26:21.872551 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pcbtr" podStartSLOduration=2.315219929 podStartE2EDuration="7.872526456s" podCreationTimestamp="2025-12-15 10:26:14 +0000 UTC" firstStartedPulling="2025-12-15 
10:26:15.789506371 +0000 UTC m=+12901.360649282" lastFinishedPulling="2025-12-15 10:26:21.346812898 +0000 UTC m=+12906.917955809" observedRunningTime="2025-12-15 10:26:21.869561349 +0000 UTC m=+12907.440704260" watchObservedRunningTime="2025-12-15 10:26:21.872526456 +0000 UTC m=+12907.443669377" Dec 15 10:26:24 crc kubenswrapper[4876]: I1215 10:26:24.752910 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:24 crc kubenswrapper[4876]: I1215 10:26:24.754912 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:25 crc kubenswrapper[4876]: I1215 10:26:25.820701 4876 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pcbtr" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="registry-server" probeResult="failure" output=< Dec 15 10:26:25 crc kubenswrapper[4876]: timeout: failed to connect service ":50051" within 1s Dec 15 10:26:25 crc kubenswrapper[4876]: > Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.206044 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/util/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.398682 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/pull/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.409752 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/util/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.437329 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/pull/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.612581 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/util/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.612905 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/extract/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.637574 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931arzvhm_45a7dec0-345b-4689-b7b6-9d429648f610/pull/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.773771 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/util/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.996079 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/util/0.log" Dec 15 10:26:28 crc kubenswrapper[4876]: I1215 10:26:28.997515 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.007565 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.198444 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/util/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.223382 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.223936 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4mp4c6_eaddc445-5da1-4790-8af0-37a4ea5f3350/extract/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.367617 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/util/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.611644 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.614572 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/util/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.617289 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.787241 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/util/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.831910 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/pull/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.832969 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c92102lxkz_80fe6462-83b6-4b61-a76e-f2db98998c80/extract/0.log" Dec 15 10:26:29 crc kubenswrapper[4876]: I1215 10:26:29.966385 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/util/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.146838 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/util/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.199931 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/pull/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.210080 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/pull/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.378410 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/util/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.379240 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/extract/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.411287 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa82ppr9_870ffce7-be32-45bc-811e-39cf12bdef1f/pull/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.599839 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-utilities/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.779524 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-content/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.787190 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-utilities/0.log" Dec 15 10:26:30 crc kubenswrapper[4876]: I1215 10:26:30.817025 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-content/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.059240 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-content/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.060444 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/extract-utilities/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.268138 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-utilities/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.408640 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t246z_7aa3a255-de24-4937-8163-97efcdd0caed/registry-server/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.581863 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-utilities/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.587434 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-content/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.604218 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-content/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.773384 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-utilities/0.log" Dec 15 10:26:31 crc kubenswrapper[4876]: I1215 10:26:31.789854 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/extract-content/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.090303 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-rhjl6_d2a29149-46f3-4504-98d2-251c30b194ab/marketplace-operator/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.183574 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-utilities/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.445783 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-content/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.481837 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-content/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.581707 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-utilities/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.682173 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-utilities/0.log" Dec 15 10:26:32 crc kubenswrapper[4876]: I1215 10:26:32.805816 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/extract-content/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.002646 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-utilities/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.214481 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-content/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.269594 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-utilities/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.327246 4876 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-content/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.453019 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-content/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.473395 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/extract-utilities/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.537828 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-5kn4w_0e9f66f7-7322-460d-ba14-60077f9c04a6/registry-server/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.748426 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-utilities/0.log" Dec 15 10:26:33 crc kubenswrapper[4876]: I1215 10:26:33.979847 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-utilities/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.030262 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-content/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.041865 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-content/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.253870 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/registry-server/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.261070 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-utilities/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.325388 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-pcbtr_ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/extract-content/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.566562 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fwlwf_b8832c04-2ac1-4a03-8b37-c7f16b0742f4/registry-server/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.577197 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-6cv4b_97b21136-eb6d-45b3-bd82-f2854cc3040e/registry-server/0.log" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.804792 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:34 crc kubenswrapper[4876]: I1215 10:26:34.856749 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:35 crc kubenswrapper[4876]: I1215 10:26:35.041569 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:35 crc kubenswrapper[4876]: I1215 10:26:35.990387 4876 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pcbtr" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="registry-server" containerID="cri-o://a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5" gracePeriod=2 Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.512825 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.599499 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content\") pod \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.599615 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities\") pod \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.599753 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjnvk\" (UniqueName: \"kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk\") pod \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\" (UID: \"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52\") " Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.600581 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities" (OuterVolumeSpecName: "utilities") pod "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" (UID: "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.606886 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk" (OuterVolumeSpecName: "kube-api-access-wjnvk") pod "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" (UID: "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52"). InnerVolumeSpecName "kube-api-access-wjnvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.702933 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.702979 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjnvk\" (UniqueName: \"kubernetes.io/projected/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-kube-api-access-wjnvk\") on node \"crc\" DevicePath \"\"" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.727026 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" (UID: "ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:26:36 crc kubenswrapper[4876]: I1215 10:26:36.804837 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.004377 4876 generic.go:334] "Generic (PLEG): container finished" podID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerID="a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5" exitCode=0 Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.004561 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerDied","Data":"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5"} Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.005173 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pcbtr" event={"ID":"ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52","Type":"ContainerDied","Data":"e5b1f41423d1edcc22fddd5a704f2a649b664a4839408851c02377c4b9ec36ff"} Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.005199 4876 scope.go:117] "RemoveContainer" containerID="a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.004637 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pcbtr" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.046206 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.047282 4876 scope.go:117] "RemoveContainer" containerID="24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.065041 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pcbtr"] Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.075578 4876 scope.go:117] "RemoveContainer" containerID="4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.127020 4876 scope.go:117] "RemoveContainer" containerID="a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5" Dec 15 10:26:37 crc kubenswrapper[4876]: E1215 10:26:37.127553 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5\": container with ID starting with a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5 not found: ID does not exist" containerID="a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.127600 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5"} err="failed to get container status \"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5\": rpc error: code = NotFound desc = could not find container \"a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5\": container with ID starting with a5c6e79646dd7c69393acaa8eec6704a4d17ad31a25e25cb6f8db541b7bd89e5 not found: ID does not exist" Dec 15 10:26:37 crc 
kubenswrapper[4876]: I1215 10:26:37.127637 4876 scope.go:117] "RemoveContainer" containerID="24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69" Dec 15 10:26:37 crc kubenswrapper[4876]: E1215 10:26:37.128036 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69\": container with ID starting with 24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69 not found: ID does not exist" containerID="24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.128074 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69"} err="failed to get container status \"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69\": rpc error: code = NotFound desc = could not find container \"24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69\": container with ID starting with 24850d4979efa0490d6be16366d329a7375d01426390f3a87674139488cacd69 not found: ID does not exist" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.128096 4876 scope.go:117] "RemoveContainer" containerID="4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c" Dec 15 10:26:37 crc kubenswrapper[4876]: E1215 10:26:37.128417 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c\": container with ID starting with 4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c not found: ID does not exist" containerID="4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c" Dec 15 10:26:37 crc kubenswrapper[4876]: I1215 10:26:37.128474 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c"} err="failed to get container status \"4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c\": rpc error: code = NotFound desc = could not find container \"4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c\": container with ID starting with 4a146dc71f0001337dc12d1b83cdd62df8c90b7d6797b31596ea7231fbb7648c not found: ID does not exist" Dec 15 10:26:38 crc kubenswrapper[4876]: I1215 10:26:38.718869 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" path="/var/lib/kubelet/pods/ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52/volumes" Dec 15 10:26:47 crc kubenswrapper[4876]: I1215 10:26:47.661240 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-znp52_806866ac-71bd-4a3d-813e-25cfe673c5ef/prometheus-operator/0.log" Dec 15 10:26:47 crc kubenswrapper[4876]: I1215 10:26:47.864325 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-86658f49c4-wwr94_0b76e3c5-1ab5-4f10-8d86-88084a1c678e/prometheus-operator-admission-webhook/0.log" Dec 15 10:26:47 crc kubenswrapper[4876]: I1215 10:26:47.865315 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-86658f49c4-svjfc_435444da-9b95-4e8b-8e29-f12c328cf54e/prometheus-operator-admission-webhook/0.log" Dec 15 10:26:48 crc 
kubenswrapper[4876]: I1215 10:26:48.126157 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-948jf_d0bafb85-b279-4dcd-88d8-5a077850fe11/perses-operator/0.log" Dec 15 10:26:48 crc kubenswrapper[4876]: I1215 10:26:48.144944 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-7vbbj_faab3f29-aea5-4f55-af4f-0fa6cb5c1547/operator/0.log" Dec 15 10:27:40 crc kubenswrapper[4876]: I1215 10:27:40.779711 4876 scope.go:117] "RemoveContainer" containerID="b78bd12a76719b062e99abad091a54062cab3f396e31181dafd483caa9dc107d" Dec 15 10:28:27 crc kubenswrapper[4876]: I1215 10:28:27.322661 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:28:27 crc kubenswrapper[4876]: I1215 10:28:27.323264 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:28:57 crc kubenswrapper[4876]: I1215 10:28:57.322996 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:28:57 crc kubenswrapper[4876]: I1215 10:28:57.324692 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.322465 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.322986 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.323042 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.323805 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:29:27 crc 
kubenswrapper[4876]: I1215 10:29:27.323849 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51" gracePeriod=600 Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.880761 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51" exitCode=0 Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.881024 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51"} Dec 15 10:29:27 crc kubenswrapper[4876]: I1215 10:29:27.881517 4876 scope.go:117] "RemoveContainer" containerID="f1ccc93ddef509b9890d99374f19332a734231f2ea62c5a78d2e11dd5310dbbf" Dec 15 10:29:28 crc kubenswrapper[4876]: I1215 10:29:28.891814 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerStarted","Data":"04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9"} Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.180288 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj"] Dec 15 10:30:00 crc kubenswrapper[4876]: E1215 10:30:00.182360 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="extract-content" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.182402 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="extract-content" Dec 15 10:30:00 crc kubenswrapper[4876]: E1215 10:30:00.182432 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="extract-utilities" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.182442 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="extract-utilities" Dec 15 10:30:00 crc kubenswrapper[4876]: E1215 10:30:00.182472 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="registry-server" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.182482 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="registry-server" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.182787 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc8fd9d-4ab4-43e4-9f65-7494bafb7e52" containerName="registry-server" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.184311 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.189654 4876 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.191483 4876 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.199467 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj"] Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.334809 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsjbf\" (UniqueName: \"kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.335700 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.335736 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.437027 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.437084 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.437140 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsjbf\" (UniqueName: \"kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.439244 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume\") pod 
\"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.444959 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.455234 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsjbf\" (UniqueName: \"kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf\") pod \"collect-profiles-29429910-hnhmj\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:00 crc kubenswrapper[4876]: I1215 10:30:00.521814 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:01 crc kubenswrapper[4876]: I1215 10:30:01.033850 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj"] Dec 15 10:30:01 crc kubenswrapper[4876]: W1215 10:30:01.051009 4876 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podadbd4a2e_4c64_45f2_bbbf_1efc95417195.slice/crio-bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858 WatchSource:0}: Error finding container bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858: Status 404 returned error can't find the container with id bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858 Dec 15 10:30:01 crc kubenswrapper[4876]: I1215 10:30:01.220693 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" event={"ID":"adbd4a2e-4c64-45f2-bbbf-1efc95417195","Type":"ContainerStarted","Data":"bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858"} Dec 15 10:30:02 crc kubenswrapper[4876]: I1215 10:30:02.230393 4876 generic.go:334] "Generic (PLEG): container finished" podID="adbd4a2e-4c64-45f2-bbbf-1efc95417195" containerID="d718d41f9dbbd83005b49a861428113784ad0e67e21806d42ddd7a0b2beec5ec" exitCode=0 Dec 15 10:30:02 crc kubenswrapper[4876]: I1215 10:30:02.230500 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" event={"ID":"adbd4a2e-4c64-45f2-bbbf-1efc95417195","Type":"ContainerDied","Data":"d718d41f9dbbd83005b49a861428113784ad0e67e21806d42ddd7a0b2beec5ec"} Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.605852 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.717355 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume\") pod \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.717736 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsjbf\" (UniqueName: \"kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf\") pod \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.718627 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume\") pod \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\" (UID: \"adbd4a2e-4c64-45f2-bbbf-1efc95417195\") " Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.718854 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume" (OuterVolumeSpecName: "config-volume") pod "adbd4a2e-4c64-45f2-bbbf-1efc95417195" (UID: "adbd4a2e-4c64-45f2-bbbf-1efc95417195"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.719466 4876 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/adbd4a2e-4c64-45f2-bbbf-1efc95417195-config-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.723498 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf" (OuterVolumeSpecName: "kube-api-access-fsjbf") pod "adbd4a2e-4c64-45f2-bbbf-1efc95417195" (UID: "adbd4a2e-4c64-45f2-bbbf-1efc95417195"). InnerVolumeSpecName "kube-api-access-fsjbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.726989 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "adbd4a2e-4c64-45f2-bbbf-1efc95417195" (UID: "adbd4a2e-4c64-45f2-bbbf-1efc95417195"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.821647 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsjbf\" (UniqueName: \"kubernetes.io/projected/adbd4a2e-4c64-45f2-bbbf-1efc95417195-kube-api-access-fsjbf\") on node \"crc\" DevicePath \"\"" Dec 15 10:30:03 crc kubenswrapper[4876]: I1215 10:30:03.821684 4876 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/adbd4a2e-4c64-45f2-bbbf-1efc95417195-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.253860 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" event={"ID":"adbd4a2e-4c64-45f2-bbbf-1efc95417195","Type":"ContainerDied","Data":"bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858"} Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.254194 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd3e0b0f676c2298644f27869dda5c33dee519fdb350c9c5a8d00ecd2550d858" Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.254322 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29429910-hnhmj" Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.681010 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z"] Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.699772 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29429865-7b66z"] Dec 15 10:30:04 crc kubenswrapper[4876]: I1215 10:30:04.719275 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb1d9c4c-56d9-4228-95e4-bc10f78dd05a" path="/var/lib/kubelet/pods/fb1d9c4c-56d9-4228-95e4-bc10f78dd05a/volumes" Dec 15 10:30:40 crc kubenswrapper[4876]: I1215 10:30:40.916651 4876 scope.go:117] "RemoveContainer" containerID="a99a0f36c0b39c6ec06642484fb42f216c85d4375131bc758a81e5eaebf4b36a" Dec 15 10:30:46 crc kubenswrapper[4876]: I1215 10:30:46.662657 4876 generic.go:334] "Generic (PLEG): container finished" podID="f3b4535d-a931-4343-bfdd-546269700b44" containerID="7469f7d336bc8ca3627424efd52d529ed4433a60b9fee28edab3c044ef8f8df4" exitCode=0 Dec 15 10:30:46 crc kubenswrapper[4876]: I1215 10:30:46.662757 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9zspp/must-gather-4m6px" event={"ID":"f3b4535d-a931-4343-bfdd-546269700b44","Type":"ContainerDied","Data":"7469f7d336bc8ca3627424efd52d529ed4433a60b9fee28edab3c044ef8f8df4"} Dec 15 10:30:46 crc kubenswrapper[4876]: I1215 10:30:46.663843 4876 scope.go:117] "RemoveContainer" containerID="7469f7d336bc8ca3627424efd52d529ed4433a60b9fee28edab3c044ef8f8df4" Dec 15 10:30:47 crc kubenswrapper[4876]: I1215 10:30:47.100618 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zspp_must-gather-4m6px_f3b4535d-a931-4343-bfdd-546269700b44/gather/0.log" Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.532369 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9zspp/must-gather-4m6px"] Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.533074 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9zspp/must-gather-4m6px" 
podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="copy" containerID="cri-o://a7a2270af194e951e4317bed9d7f6bbae5b519496399b42e0eb3d827fc01a506" gracePeriod=2 Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.544847 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9zspp/must-gather-4m6px"] Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.797659 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zspp_must-gather-4m6px_f3b4535d-a931-4343-bfdd-546269700b44/copy/0.log" Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.800412 4876 generic.go:334] "Generic (PLEG): container finished" podID="f3b4535d-a931-4343-bfdd-546269700b44" containerID="a7a2270af194e951e4317bed9d7f6bbae5b519496399b42e0eb3d827fc01a506" exitCode=143 Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.981240 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zspp_must-gather-4m6px_f3b4535d-a931-4343-bfdd-546269700b44/copy/0.log" Dec 15 10:30:58 crc kubenswrapper[4876]: I1215 10:30:58.982063 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.124072 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output\") pod \"f3b4535d-a931-4343-bfdd-546269700b44\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.124290 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pssvb\" (UniqueName: \"kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb\") pod \"f3b4535d-a931-4343-bfdd-546269700b44\" (UID: \"f3b4535d-a931-4343-bfdd-546269700b44\") " Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.146261 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb" (OuterVolumeSpecName: "kube-api-access-pssvb") pod "f3b4535d-a931-4343-bfdd-546269700b44" (UID: "f3b4535d-a931-4343-bfdd-546269700b44"). InnerVolumeSpecName "kube-api-access-pssvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.253853 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pssvb\" (UniqueName: \"kubernetes.io/projected/f3b4535d-a931-4343-bfdd-546269700b44-kube-api-access-pssvb\") on node \"crc\" DevicePath \"\"" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.453857 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f3b4535d-a931-4343-bfdd-546269700b44" (UID: "f3b4535d-a931-4343-bfdd-546269700b44"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.457780 4876 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f3b4535d-a931-4343-bfdd-546269700b44-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.811510 4876 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9zspp_must-gather-4m6px_f3b4535d-a931-4343-bfdd-546269700b44/copy/0.log" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.812181 4876 scope.go:117] "RemoveContainer" containerID="a7a2270af194e951e4317bed9d7f6bbae5b519496399b42e0eb3d827fc01a506" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.812295 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9zspp/must-gather-4m6px" Dec 15 10:30:59 crc kubenswrapper[4876]: I1215 10:30:59.831243 4876 scope.go:117] "RemoveContainer" containerID="7469f7d336bc8ca3627424efd52d529ed4433a60b9fee28edab3c044ef8f8df4" Dec 15 10:31:00 crc kubenswrapper[4876]: I1215 10:31:00.717891 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3b4535d-a931-4343-bfdd-546269700b44" path="/var/lib/kubelet/pods/f3b4535d-a931-4343-bfdd-546269700b44/volumes" Dec 15 10:31:27 crc kubenswrapper[4876]: I1215 10:31:27.322712 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:31:27 crc kubenswrapper[4876]: I1215 10:31:27.323278 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:31:57 crc kubenswrapper[4876]: I1215 10:31:57.323286 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:31:57 crc kubenswrapper[4876]: I1215 10:31:57.324021 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 15 10:32:27 crc kubenswrapper[4876]: I1215 10:32:27.322737 4876 patch_prober.go:28] interesting pod/machine-config-daemon-zdprc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 15 10:32:27 crc kubenswrapper[4876]: I1215 10:32:27.323445 4876 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Dec 15 10:32:27 crc kubenswrapper[4876]: I1215 10:32:27.323503 4876 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" Dec 15 10:32:27 crc kubenswrapper[4876]: I1215 10:32:27.324566 4876 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9"} pod="openshift-machine-config-operator/machine-config-daemon-zdprc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 15 10:32:27 crc kubenswrapper[4876]: I1215 10:32:27.324686 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerName="machine-config-daemon" containerID="cri-o://04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" gracePeriod=600 Dec 15 10:32:27 crc kubenswrapper[4876]: E1215 10:32:27.460979 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:32:28 crc kubenswrapper[4876]: I1215 10:32:28.029275 4876 generic.go:334] "Generic (PLEG): container finished" podID="f9e29c3a-f186-4bb8-af46-82cea3a16508" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" exitCode=0 Dec 15 10:32:28 crc kubenswrapper[4876]: I1215 10:32:28.029327 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" event={"ID":"f9e29c3a-f186-4bb8-af46-82cea3a16508","Type":"ContainerDied","Data":"04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9"} Dec 15 10:32:28 crc kubenswrapper[4876]: I1215 10:32:28.029369 4876 scope.go:117] "RemoveContainer" containerID="28248c6fa065ec3466ee74d526a7667afa9f78607dfa8c5c755858dc4bcfdc51" Dec 15 10:32:28 crc kubenswrapper[4876]: I1215 10:32:28.030543 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:32:28 crc kubenswrapper[4876]: E1215 10:32:28.030860 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:32:41 crc kubenswrapper[4876]: I1215 10:32:41.705338 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:32:41 crc kubenswrapper[4876]: E1215 10:32:41.706245 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:32:54 crc kubenswrapper[4876]: I1215 10:32:54.719522 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:32:54 crc kubenswrapper[4876]: E1215 10:32:54.720498 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:33:08 crc kubenswrapper[4876]: I1215 10:33:08.706389 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:33:08 crc kubenswrapper[4876]: E1215 10:33:08.707762 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:33:23 crc kubenswrapper[4876]: I1215 10:33:23.706629 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:33:23 crc kubenswrapper[4876]: E1215 10:33:23.707961 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:33:38 crc kubenswrapper[4876]: I1215 10:33:38.706431 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:33:38 crc kubenswrapper[4876]: E1215 10:33:38.709201 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:33:51 crc kubenswrapper[4876]: I1215 10:33:51.706052 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:33:51 crc kubenswrapper[4876]: E1215 10:33:51.707765 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" 
podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.728883 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:33:58 crc kubenswrapper[4876]: E1215 10:33:58.730552 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="copy" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730573 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="copy" Dec 15 10:33:58 crc kubenswrapper[4876]: E1215 10:33:58.730595 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adbd4a2e-4c64-45f2-bbbf-1efc95417195" containerName="collect-profiles" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730603 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="adbd4a2e-4c64-45f2-bbbf-1efc95417195" containerName="collect-profiles" Dec 15 10:33:58 crc kubenswrapper[4876]: E1215 10:33:58.730620 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="gather" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730626 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="gather" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730828 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="copy" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730841 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="adbd4a2e-4c64-45f2-bbbf-1efc95417195" containerName="collect-profiles" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.730857 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3b4535d-a931-4343-bfdd-546269700b44" containerName="gather" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.732335 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.742585 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.911135 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj6xh\" (UniqueName: \"kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.911275 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:58 crc kubenswrapper[4876]: I1215 10:33:58.911365 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.013358 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj6xh\" (UniqueName: \"kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.013448 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.013494 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.014149 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.014173 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.044919 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pj6xh\" (UniqueName: \"kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh\") pod \"community-operators-x5vq2\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.056935 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.569714 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.966660 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerID="28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6" exitCode=0 Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.966749 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerDied","Data":"28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6"} Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.966995 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerStarted","Data":"213aff5923e61644a543804d08ec11a09cebcb387f5e89a5072020367b092315"} Dec 15 10:33:59 crc kubenswrapper[4876]: I1215 10:33:59.969200 4876 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 15 10:34:00 crc kubenswrapper[4876]: I1215 10:34:00.978751 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerStarted","Data":"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f"} Dec 15 10:34:01 crc kubenswrapper[4876]: I1215 10:34:01.989943 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerID="b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f" exitCode=0 Dec 15 10:34:01 crc kubenswrapper[4876]: I1215 10:34:01.990000 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerDied","Data":"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f"} Dec 15 10:34:03 crc kubenswrapper[4876]: I1215 10:34:03.001449 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerStarted","Data":"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45"} Dec 15 10:34:03 crc kubenswrapper[4876]: I1215 10:34:03.027757 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x5vq2" podStartSLOduration=2.436731679 podStartE2EDuration="5.027734704s" podCreationTimestamp="2025-12-15 10:33:58 +0000 UTC" firstStartedPulling="2025-12-15 10:33:59.968825465 +0000 UTC m=+13365.539968376" lastFinishedPulling="2025-12-15 10:34:02.5598285 +0000 UTC m=+13368.130971401" observedRunningTime="2025-12-15 10:34:03.020231617 +0000 UTC m=+13368.591374558" watchObservedRunningTime="2025-12-15 
10:34:03.027734704 +0000 UTC m=+13368.598877615" Dec 15 10:34:05 crc kubenswrapper[4876]: I1215 10:34:05.705979 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:34:05 crc kubenswrapper[4876]: E1215 10:34:05.706641 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:34:09 crc kubenswrapper[4876]: I1215 10:34:09.057184 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:09 crc kubenswrapper[4876]: I1215 10:34:09.059255 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:09 crc kubenswrapper[4876]: I1215 10:34:09.105014 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:10 crc kubenswrapper[4876]: I1215 10:34:10.122620 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:10 crc kubenswrapper[4876]: I1215 10:34:10.188535 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.094899 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x5vq2" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="registry-server" containerID="cri-o://c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45" gracePeriod=2 Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.596748 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.699472 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj6xh\" (UniqueName: \"kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh\") pod \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.699714 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities\") pod \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.699801 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content\") pod \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\" (UID: \"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf\") " Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.700706 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities" (OuterVolumeSpecName: "utilities") pod "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" (UID: "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.707308 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh" (OuterVolumeSpecName: "kube-api-access-pj6xh") pod "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" (UID: "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf"). InnerVolumeSpecName "kube-api-access-pj6xh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.758769 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" (UID: "dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.801752 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.801795 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:12 crc kubenswrapper[4876]: I1215 10:34:12.801809 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj6xh\" (UniqueName: \"kubernetes.io/projected/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf-kube-api-access-pj6xh\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.107543 4876 generic.go:334] "Generic (PLEG): container finished" podID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerID="c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45" exitCode=0 Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.107583 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerDied","Data":"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45"} Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.107621 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x5vq2" event={"ID":"dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf","Type":"ContainerDied","Data":"213aff5923e61644a543804d08ec11a09cebcb387f5e89a5072020367b092315"} Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.107642 4876 scope.go:117] "RemoveContainer" containerID="c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.107644 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x5vq2" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.142730 4876 scope.go:117] "RemoveContainer" containerID="b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.147080 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.163989 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x5vq2"] Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.201728 4876 scope.go:117] "RemoveContainer" containerID="28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.223887 4876 scope.go:117] "RemoveContainer" containerID="c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45" Dec 15 10:34:13 crc kubenswrapper[4876]: E1215 10:34:13.224481 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45\": container with ID starting with c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45 not found: ID does not exist" containerID="c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.224535 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45"} err="failed to get container status \"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45\": rpc error: code = NotFound desc = could not find container \"c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45\": container with ID starting with c19696f090d246d45b46d5e0657e7d479ae807a52038364145e3968741c08a45 not found: ID does not exist" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.224896 4876 scope.go:117] "RemoveContainer" containerID="b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f" Dec 15 10:34:13 crc kubenswrapper[4876]: E1215 10:34:13.225285 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f\": container with ID starting with b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f not found: ID does not exist" containerID="b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.225328 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f"} err="failed to get container status \"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f\": rpc error: code = NotFound desc = could not find container \"b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f\": container with ID starting with b66bd0a7c2d357ef130aa8e9f58803f010ea6edeea2dc77ab8dbc6a219488a8f not found: ID does not exist" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.225351 4876 scope.go:117] "RemoveContainer" containerID="28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6" Dec 15 10:34:13 crc kubenswrapper[4876]: E1215 10:34:13.225636 4876 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6\": container with ID starting with 28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6 not found: ID does not exist" containerID="28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6" Dec 15 10:34:13 crc kubenswrapper[4876]: I1215 10:34:13.225703 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6"} err="failed to get container status \"28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6\": rpc error: code = NotFound desc = could not find container \"28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6\": container with ID starting with 28dd00b2bff8c748e2ccd12f706dcdc12cf10941537d31355641688f6b1eb5b6 not found: ID does not exist" Dec 15 10:34:14 crc kubenswrapper[4876]: I1215 10:34:14.733718 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" path="/var/lib/kubelet/pods/dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf/volumes" Dec 15 10:34:19 crc kubenswrapper[4876]: I1215 10:34:19.705571 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:34:19 crc kubenswrapper[4876]: E1215 10:34:19.706580 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.652823 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:23 crc kubenswrapper[4876]: E1215 10:34:23.653812 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="extract-content" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.653827 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="extract-content" Dec 15 10:34:23 crc kubenswrapper[4876]: E1215 10:34:23.653860 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="extract-utilities" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.653866 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="extract-utilities" Dec 15 10:34:23 crc kubenswrapper[4876]: E1215 10:34:23.653882 4876 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="registry-server" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.653888 4876 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="registry-server" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.654091 4876 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd9a0eb0-d9ea-47c6-aada-bc85476c3fdf" containerName="registry-server" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.655908 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.664916 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.813757 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wlsf\" (UniqueName: \"kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.813844 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.813890 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.916025 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.916197 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wlsf\" (UniqueName: \"kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.916266 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.916740 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.916768 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.946409 4876 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6wlsf\" (UniqueName: \"kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf\") pod \"certified-operators-tmx9d\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:23 crc kubenswrapper[4876]: I1215 10:34:23.985609 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:24 crc kubenswrapper[4876]: I1215 10:34:24.482966 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:25 crc kubenswrapper[4876]: I1215 10:34:25.239289 4876 generic.go:334] "Generic (PLEG): container finished" podID="09052e76-4c3d-4d5e-a23a-0c77c4063722" containerID="4d622d6e3d2e89f76379b4de14cc3b2452564271942ade69cedc0e924bf2b766" exitCode=0 Dec 15 10:34:25 crc kubenswrapper[4876]: I1215 10:34:25.239363 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerDied","Data":"4d622d6e3d2e89f76379b4de14cc3b2452564271942ade69cedc0e924bf2b766"} Dec 15 10:34:25 crc kubenswrapper[4876]: I1215 10:34:25.239598 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerStarted","Data":"04af8fc5e022f38e1d5186f5a3b130ea1d24fa01c148ff04eb6d9fd1750b179f"} Dec 15 10:34:26 crc kubenswrapper[4876]: I1215 10:34:26.253921 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerStarted","Data":"8c8bd0429d8781df8972677162182e077c779317acb6a932ac996875bcd506b9"} Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.263965 4876 generic.go:334] "Generic (PLEG): container finished" podID="09052e76-4c3d-4d5e-a23a-0c77c4063722" containerID="8c8bd0429d8781df8972677162182e077c779317acb6a932ac996875bcd506b9" exitCode=0 Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.264045 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerDied","Data":"8c8bd0429d8781df8972677162182e077c779317acb6a932ac996875bcd506b9"} Dec 15 10:34:27 crc kubenswrapper[4876]: E1215 10:34:27.364402 4876 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09052e76_4c3d_4d5e_a23a_0c77c4063722.slice/crio-conmon-8c8bd0429d8781df8972677162182e077c779317acb6a932ac996875bcd506b9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09052e76_4c3d_4d5e_a23a_0c77c4063722.slice/crio-8c8bd0429d8781df8972677162182e077c779317acb6a932ac996875bcd506b9.scope\": RecentStats: unable to find data in memory cache]" Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.855338 4876 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"] Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.859992 4876 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.875050 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"] Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.899812 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.899963 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhlk9\" (UniqueName: \"kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:27 crc kubenswrapper[4876]: I1215 10:34:27.900009 4876 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.005037 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhlk9\" (UniqueName: \"kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.005370 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.005628 4876 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.005951 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.008040 4876 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.026159 4876 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bhlk9\" (UniqueName: \"kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9\") pod \"redhat-marketplace-t42p5\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") " pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.214834 4876 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.297049 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerStarted","Data":"4d439f0c9096c2cbf3b1acbaa235dd60c3542bae19ebb698b77efe0b9dd9a8ee"} Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.313220 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tmx9d" podStartSLOduration=2.782325341 podStartE2EDuration="5.313203081s" podCreationTimestamp="2025-12-15 10:34:23 +0000 UTC" firstStartedPulling="2025-12-15 10:34:25.2439378 +0000 UTC m=+13390.815080711" lastFinishedPulling="2025-12-15 10:34:27.77481554 +0000 UTC m=+13393.345958451" observedRunningTime="2025-12-15 10:34:28.312746898 +0000 UTC m=+13393.883889809" watchObservedRunningTime="2025-12-15 10:34:28.313203081 +0000 UTC m=+13393.884345982" Dec 15 10:34:28 crc kubenswrapper[4876]: I1215 10:34:28.748815 4876 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"] Dec 15 10:34:29 crc kubenswrapper[4876]: I1215 10:34:29.309779 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f04dc13-b12c-4102-b3f3-3de9ad322469" containerID="0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4" exitCode=0 Dec 15 10:34:29 crc kubenswrapper[4876]: I1215 10:34:29.309984 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerDied","Data":"0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4"} Dec 15 10:34:29 crc kubenswrapper[4876]: I1215 10:34:29.310229 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerStarted","Data":"4a117cd2bb45ddc797d283963f13ceebcd842eb65ffe932e6186b93fd4801e26"} Dec 15 10:34:30 crc kubenswrapper[4876]: I1215 10:34:30.320025 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerStarted","Data":"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"} Dec 15 10:34:30 crc kubenswrapper[4876]: I1215 10:34:30.705612 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:34:30 crc kubenswrapper[4876]: E1215 10:34:30.706184 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:34:31 crc kubenswrapper[4876]: I1215 
10:34:31.331139 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f04dc13-b12c-4102-b3f3-3de9ad322469" containerID="58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7" exitCode=0 Dec 15 10:34:31 crc kubenswrapper[4876]: I1215 10:34:31.331191 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerDied","Data":"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"} Dec 15 10:34:32 crc kubenswrapper[4876]: I1215 10:34:32.341203 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerStarted","Data":"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"} Dec 15 10:34:32 crc kubenswrapper[4876]: I1215 10:34:32.362904 4876 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t42p5" podStartSLOduration=2.767146015 podStartE2EDuration="5.362880184s" podCreationTimestamp="2025-12-15 10:34:27 +0000 UTC" firstStartedPulling="2025-12-15 10:34:29.313216449 +0000 UTC m=+13394.884359360" lastFinishedPulling="2025-12-15 10:34:31.908950618 +0000 UTC m=+13397.480093529" observedRunningTime="2025-12-15 10:34:32.356753334 +0000 UTC m=+13397.927896245" watchObservedRunningTime="2025-12-15 10:34:32.362880184 +0000 UTC m=+13397.934023095" Dec 15 10:34:33 crc kubenswrapper[4876]: I1215 10:34:33.985940 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:33 crc kubenswrapper[4876]: I1215 10:34:33.986302 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:34 crc kubenswrapper[4876]: I1215 10:34:34.040711 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:34 crc kubenswrapper[4876]: I1215 10:34:34.399852 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:35 crc kubenswrapper[4876]: I1215 10:34:35.035401 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:36 crc kubenswrapper[4876]: I1215 10:34:36.373982 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tmx9d" podUID="09052e76-4c3d-4d5e-a23a-0c77c4063722" containerName="registry-server" containerID="cri-o://4d439f0c9096c2cbf3b1acbaa235dd60c3542bae19ebb698b77efe0b9dd9a8ee" gracePeriod=2 Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.398072 4876 generic.go:334] "Generic (PLEG): container finished" podID="09052e76-4c3d-4d5e-a23a-0c77c4063722" containerID="4d439f0c9096c2cbf3b1acbaa235dd60c3542bae19ebb698b77efe0b9dd9a8ee" exitCode=0 Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.398637 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerDied","Data":"4d439f0c9096c2cbf3b1acbaa235dd60c3542bae19ebb698b77efe0b9dd9a8ee"} Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.398667 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmx9d" 
event={"ID":"09052e76-4c3d-4d5e-a23a-0c77c4063722","Type":"ContainerDied","Data":"04af8fc5e022f38e1d5186f5a3b130ea1d24fa01c148ff04eb6d9fd1750b179f"} Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.398680 4876 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04af8fc5e022f38e1d5186f5a3b130ea1d24fa01c148ff04eb6d9fd1750b179f" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.399752 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.510545 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wlsf\" (UniqueName: \"kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf\") pod \"09052e76-4c3d-4d5e-a23a-0c77c4063722\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.510729 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities\") pod \"09052e76-4c3d-4d5e-a23a-0c77c4063722\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.510767 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content\") pod \"09052e76-4c3d-4d5e-a23a-0c77c4063722\" (UID: \"09052e76-4c3d-4d5e-a23a-0c77c4063722\") " Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.511705 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities" (OuterVolumeSpecName: "utilities") pod "09052e76-4c3d-4d5e-a23a-0c77c4063722" (UID: "09052e76-4c3d-4d5e-a23a-0c77c4063722"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.517266 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf" (OuterVolumeSpecName: "kube-api-access-6wlsf") pod "09052e76-4c3d-4d5e-a23a-0c77c4063722" (UID: "09052e76-4c3d-4d5e-a23a-0c77c4063722"). InnerVolumeSpecName "kube-api-access-6wlsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.574389 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09052e76-4c3d-4d5e-a23a-0c77c4063722" (UID: "09052e76-4c3d-4d5e-a23a-0c77c4063722"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.612971 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-utilities\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.613005 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09052e76-4c3d-4d5e-a23a-0c77c4063722-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:37 crc kubenswrapper[4876]: I1215 10:34:37.613015 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wlsf\" (UniqueName: \"kubernetes.io/projected/09052e76-4c3d-4d5e-a23a-0c77c4063722-kube-api-access-6wlsf\") on node \"crc\" DevicePath \"\"" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.215208 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.215558 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.265536 4876 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.407054 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmx9d" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.452530 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.464348 4876 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t42p5" Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.468937 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tmx9d"] Dec 15 10:34:38 crc kubenswrapper[4876]: I1215 10:34:38.718094 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09052e76-4c3d-4d5e-a23a-0c77c4063722" path="/var/lib/kubelet/pods/09052e76-4c3d-4d5e-a23a-0c77c4063722/volumes" Dec 15 10:34:40 crc kubenswrapper[4876]: I1215 10:34:40.638895 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"] Dec 15 10:34:40 crc kubenswrapper[4876]: I1215 10:34:40.639193 4876 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t42p5" podUID="9f04dc13-b12c-4102-b3f3-3de9ad322469" containerName="registry-server" containerID="cri-o://47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2" gracePeriod=2 Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.153239 4876 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t42p5"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.199653 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhlk9\" (UniqueName: \"kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9\") pod \"9f04dc13-b12c-4102-b3f3-3de9ad322469\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") "
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.199923 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities\") pod \"9f04dc13-b12c-4102-b3f3-3de9ad322469\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") "
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.199978 4876 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content\") pod \"9f04dc13-b12c-4102-b3f3-3de9ad322469\" (UID: \"9f04dc13-b12c-4102-b3f3-3de9ad322469\") "
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.205185 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities" (OuterVolumeSpecName: "utilities") pod "9f04dc13-b12c-4102-b3f3-3de9ad322469" (UID: "9f04dc13-b12c-4102-b3f3-3de9ad322469"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.205898 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9" (OuterVolumeSpecName: "kube-api-access-bhlk9") pod "9f04dc13-b12c-4102-b3f3-3de9ad322469" (UID: "9f04dc13-b12c-4102-b3f3-3de9ad322469"). InnerVolumeSpecName "kube-api-access-bhlk9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.225485 4876 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f04dc13-b12c-4102-b3f3-3de9ad322469" (UID: "9f04dc13-b12c-4102-b3f3-3de9ad322469"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.301919 4876 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-utilities\") on node \"crc\" DevicePath \"\""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.301957 4876 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f04dc13-b12c-4102-b3f3-3de9ad322469-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.301968 4876 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhlk9\" (UniqueName: \"kubernetes.io/projected/9f04dc13-b12c-4102-b3f3-3de9ad322469-kube-api-access-bhlk9\") on node \"crc\" DevicePath \"\""
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.438802 4876 generic.go:334] "Generic (PLEG): container finished" podID="9f04dc13-b12c-4102-b3f3-3de9ad322469" containerID="47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2" exitCode=0
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.438840 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerDied","Data":"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"}
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.438872 4876 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t42p5" event={"ID":"9f04dc13-b12c-4102-b3f3-3de9ad322469","Type":"ContainerDied","Data":"4a117cd2bb45ddc797d283963f13ceebcd842eb65ffe932e6186b93fd4801e26"}
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.438900 4876 scope.go:117] "RemoveContainer" containerID="47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.438898 4876 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t42p5"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.460329 4876 scope.go:117] "RemoveContainer" containerID="58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.494555 4876 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"]
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.506350 4876 scope.go:117] "RemoveContainer" containerID="0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.511297 4876 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t42p5"]
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.557856 4876 scope.go:117] "RemoveContainer" containerID="47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"
Dec 15 10:34:41 crc kubenswrapper[4876]: E1215 10:34:41.558845 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2\": container with ID starting with 47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2 not found: ID does not exist" containerID="47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.558892 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2"} err="failed to get container status \"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2\": rpc error: code = NotFound desc = could not find container \"47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2\": container with ID starting with 47fc341b8e68a2afc89208c47f4918017b57842bbfbb8d36e5fd802977f2c1b2 not found: ID does not exist"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.558918 4876 scope.go:117] "RemoveContainer" containerID="58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"
Dec 15 10:34:41 crc kubenswrapper[4876]: E1215 10:34:41.559343 4876 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7\": container with ID starting with 58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7 not found: ID does not exist" containerID="58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.559367 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7"} err="failed to get container status \"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7\": rpc error: code = NotFound desc = could not find container \"58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7\": container with ID starting with 58e5fba623d55d0006297e92feee5d75f98b7afcecd128002f7a21e0b56fc3d7 not found: ID does not exist"
Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.559383 4876 scope.go:117] "RemoveContainer" containerID="0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4"
Dec 15 10:34:41 crc kubenswrapper[4876]: E1215 10:34:41.559602 4876 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4\": container with ID starting with 0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4 not found: ID does not exist" containerID="0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4" Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.559660 4876 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4"} err="failed to get container status \"0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4\": rpc error: code = NotFound desc = could not find container \"0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4\": container with ID starting with 0c05eff2861bb3501df2c12178b6b833a7dd9a4934cea3867eb7f815514e67f4 not found: ID does not exist" Dec 15 10:34:41 crc kubenswrapper[4876]: I1215 10:34:41.705574 4876 scope.go:117] "RemoveContainer" containerID="04b98fcf6c366555adb33304b75c74e598ca15ca4b7aba4967833ff422de23c9" Dec 15 10:34:41 crc kubenswrapper[4876]: E1215 10:34:41.706033 4876 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-zdprc_openshift-machine-config-operator(f9e29c3a-f186-4bb8-af46-82cea3a16508)\"" pod="openshift-machine-config-operator/machine-config-daemon-zdprc" podUID="f9e29c3a-f186-4bb8-af46-82cea3a16508" Dec 15 10:34:42 crc kubenswrapper[4876]: I1215 10:34:42.720995 4876 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f04dc13-b12c-4102-b3f3-3de9ad322469" path="/var/lib/kubelet/pods/9f04dc13-b12c-4102-b3f3-3de9ad322469/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515117762116024454 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015117762117017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015117727021016510 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015117727021015460 5ustar corecore